[
  {
    "path": ".github/workflows/codeql.yml",
    "content": "# For most projects, this workflow file will not need changing; you simply need\n# to commit it to your repository.\n#\n# You may wish to alter this file to override the set of languages analyzed,\n# or to provide custom queries or build logic.\n#\n# ******** NOTE ********\n# We have attempted to detect the languages in your repository. Please check\n# the `language` matrix defined below to confirm you have the correct set of\n# supported CodeQL languages.\n#\nname: \"CodeQL\"\n\non:\n  push:\n    branches: [ \"main\" ]\n  pull_request:\n    # The branches below must be a subset of the branches above\n    branches: [ \"main\" ]\n  schedule:\n    - cron: '34 11 * * 4'\n\njobs:\n  analyze:\n    name: Analyze\n    runs-on: ubuntu-latest\n    permissions:\n      actions: read\n      contents: read\n      security-events: write\n\n    strategy:\n      fail-fast: false\n      matrix:\n        language: [ 'javascript', 'python' ]\n        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]\n        # Use only 'java' to analyze code written in Java, Kotlin or both\n        # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both\n        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support\n\n    steps:\n    - name: Checkout repository\n      uses: actions/checkout@v3\n\n    # Initializes the CodeQL tools for scanning.\n    - name: Initialize CodeQL\n      uses: github/codeql-action/init@v2 # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n      with:\n        languages: ${{ matrix.language }}\n        # If you wish to specify custom queries, you can do so here or in a config file.\n        # By default, queries listed here will override any specified in a config file.\n        # Prefix the list here with \"+\" to use these queries and those in the config file.\n\n        # Details on 
CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs\n        # queries: security-extended,security-and-quality\n\n\n    # Autobuild attempts to build any compiled languages  (C/C++, C#, Go, or Java).\n    # If this step fails, then you should remove it and run the build manually (see below)\n    - name: Autobuild\n      uses: github/codeql-action/autobuild@v2 # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n\n    # ℹ️ Command-line programs to run using the OS shell.\n    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun\n\n    #   If the Autobuild fails above, remove it and uncomment the following three lines.\n    #   modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.\n\n    # - run: |\n    #   echo \"Run, Build Application using script\"\n    #   ./location_of_script_within_repo/buildscript.sh\n\n    - name: Perform CodeQL Analysis\n      uses: github/codeql-action/analyze@v2 # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n      with:\n        category: \"/language:${{matrix.language}}\"\n"
  },
  {
    "path": ".github/workflows/dependency-review.yml",
    "content": "# Dependency Review Action\n#\n# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging.\n#\n# Source repository: https://github.com/actions/dependency-review-action\n# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement\nname: 'Dependency Review'\non: [pull_request]\n\npermissions:\n  contents: read\n\njobs:\n  dependency-review:\n    runs-on: ubuntu-latest\n    steps:\n      - name: 'Checkout Repository'\n        uses: actions/checkout@v3\n      - name: 'Dependency Review'\n        uses: actions/dependency-review-action@v2\n"
  },
  {
    "path": ".github/workflows/deploy.yaml",
    "content": "name: Deploy FastKafka documentation to the GitHub Pages\n\non:\n  push:\n    branches: [ \"main\", \"master\"]\n  workflow_dispatch:\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: airtai/workflows/fastkafka-docusaurus-ghp@main # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n"
  },
  {
    "path": ".github/workflows/index-docs-for-fastkafka-chat.yaml",
    "content": "name: Index docs for fastkafka chat application\n\non:\n  workflow_run:\n    workflows: [\"pages-build-deployment\"]\n    types: [completed]\n\nenv:\n  OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}\n  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}\n\njobs:\n  on-success:\n    name: Index docs for fastkafka chat application\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    if: ${{ github.event.workflow_run.conclusion == 'success' }}\n    steps:\n      - name: Checkout airtai/fastkafkachat repo\n        uses: actions/checkout@v3\n        with:\n          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}\n          ref: ${{ github.head_ref }}\n          repository: airtai/fastkafkachat\n\n      - name: Setup Python\n        uses: actions/setup-python@v4\n        with:\n          python-version: \"3.9\"\n          cache: \"pip\"\n          cache-dependency-path: settings.ini\n\n      - name: Install Dependencies\n        shell: bash\n        run: |\n          set -ux\n          python -m pip install --upgrade pip\n          test -f setup.py && pip install -e \".[dev]\"\n\n      - name: Index the fastkafka docs\n        shell: bash\n        run: |\n          index_website_data\n\n      - name: Push updated index to airtai/fastkafkachat repo\n        uses: stefanzweifel/git-auto-commit-action@v4 # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n        with:\n          commit_message: \"Update fastkafka docs index file\"\n          file_pattern: \"data/website_index.zip\"\n"
  },
  {
    "path": ".github/workflows/test.yaml",
    "content": "name: CI\non:  [workflow_dispatch, push]\n\njobs:\n  mypy_static_analysis:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: airtai/workflows/airt-mypy-check@main # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n  bandit_static_analysis:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: airtai/workflows/airt-bandit-check@main # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n  semgrep_static_analysis:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: airtai/workflows/airt-semgrep-check@main # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n  test:\n    timeout-minutes: 60\n    strategy:\n      fail-fast: false\n      matrix:\n        os:  [ubuntu, windows]\n        version: [\"3.8\", \"3.9\", \"3.10\", \"3.11\"]\n    runs-on: ${{ matrix.os }}-latest\n    defaults:\n      run:\n        shell: bash\n    steps:\n      - name: Configure Pagefile\n        if: matrix.os == 'windows'\n        uses: al-cheb/configure-pagefile-action@v1.2 # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n        with:\n          minimum-size: 8GB\n          maximum-size: 8GB\n          disk-root: \"C:\"\n      - name: Install quarto\n        uses: quarto-dev/quarto-actions/setup@v2 # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n      - name: Prepare nbdev env\n        uses: fastai/workflows/nbdev-ci@master # nosemgrep: yaml.github-actions.security.third-party-action-not-pinned-to-commit-sha.third-party-action-not-pinned-to-commit-sha\n        with:\n          version: ${{ matrix.version }}\n          skip_test: true\n      - name: List pip deps\n        run: 
|\n          pip list\n      - name: Install testing deps\n        run: |\n          fastkafka docs install_deps\n          fastkafka testing install_deps\n      - name: Run nbdev tests\n        run: |\n          nbdev_test --timing --do_print --n_workers 1 --file_glob \"*_CLI*\" # Run CLI tests first one by one because of npm installation clashes with other tests\n          nbdev_test --timing --do_print --skip_file_glob \"*_CLI*\"\n      - name: Test building docs with nbdev-mkdocs\n        if: matrix.os != 'windows'\n        run: |\n          nbdev_mkdocs docs\n          if [ -f \"mkdocs/site/index.html\" ]; then\n            echo \"docs built successfully.\"\n          else\n            echo \"index page not found in rendered docs.\"\n            ls -la\n            ls -la mkdocs/site/\n            exit 1\n          fi\n\n  # https://github.com/marketplace/actions/alls-green#why\n  check: # This job does nothing and is only used for the branch protection\n    if: always()\n\n    needs:\n      - test\n      - mypy_static_analysis\n      - bandit_static_analysis\n      - semgrep_static_analysis\n\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Decide whether the needed jobs succeeded or failed\n        uses: re-actors/alls-green@release/v1 # nosemgrep\n        with:\n          jobs: ${{ toJSON(needs) }}\n\n"
  },
  {
    "path": ".gitignore",
    "content": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\npip-wheel-metadata/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n.python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# docusaurus documentation\ndocusaurus/node_modules\ndocusaurus/docs\ndocusaurus/build\n\ndocusaurus/.docusaurus\ndocusaurus/.cache-loader\n\ndocusaurus/.DS_Store\ndocusaurus/.env.local\ndocusaurus/.env.development.local\ndocusaurus/.env.test.local\ndocusaurus/.env.production.local\n\ndocusaurus/npm-debug.log*\ndocusaurus/yarn-debug.log*\ndocusaurus/yarn-error.log*\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# PyCharm\n.idea\n\n# nbdev related stuff\n.gitattributes\n.gitconfig\n_proc\n_docs\n\nnbs/asyncapi\nnbs/guides/asyncapi\nnbs/.last_checked\nnbs/_*.ipynb\ntoken\n*.bak\n\n# nbdev_mkdocs\nmkdocs/docs/\nmkdocs/site/\n\n# Ignore trashbins\n.Trash*\n\n.vscode\n"
  },
  {
    "path": ".pre-commit-config.yaml",
    "content": "# See https://pre-commit.com for more information\n# See https://pre-commit.com/hooks.html for more hooks\n\nrepos:\n-   repo: https://github.com/pre-commit/pre-commit-hooks\n    rev: \"v4.4.0\"\n    hooks:\n      #    -   id: trailing-whitespace\n      #    -   id: end-of-file-fixer\n      #    -   id: check-yaml\n    -   id: check-added-large-files\n\n- repo: https://github.com/PyCQA/bandit\n  rev: '1.7.5'\n  hooks:\n  - id: bandit\n\n    #- repo: https://github.com/returntocorp/semgrep\n    #  rev: \"v1.14.0\"\n    #  hooks:\n    #    - id: semgrep\n    #      name: Semgrep \n    #      args: [\"--config\", \"auto\", \"--error\"]\n    #      exclude: ^docker/\n\n"
  },
  {
    "path": ".semgrepignore",
    "content": "docker/\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Release notes\n\n<!-- do not remove -->\n\n## 0.8.0\n\n### New Features\n\n- Add support for Pydantic v2 ([#408](https://github.com/airtai/fastkafka/issues/408)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka now uses Pydantic v2 for serialization/deserialization of messages\n \n- Enable nbdev_test on windows and run CI tests on windows ([#356](https://github.com/airtai/fastkafka/pull/356)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n### Bugs Squashed\n\n- Fix ´fastkafka testing install deps´ failing ([#385](https://github.com/airtai/fastkafka/pull/385)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Create asyncapi docs directory only while building asyncapi docs ([#368](https://github.com/airtai/fastkafka/pull/368)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Add retries to producer in case of raised KafkaTimeoutError exception ([#423](https://github.com/airtai/fastkafka/pull/423)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n## 0.7.1\n\n### Bugs Squashed\n \n - Limit pydantic version to <2.0 ([#427](https://github.com/airtai/fastkafka/issues/427))\n\n - Fix Kafka broker version installation issues ([#427](https://github.com/airtai/fastkafka/issues/427))\n\n - Fix ApacheKafkaBroker startup issues ([#427](https://github.com/airtai/fastkafka/issues/427))\n\n## 0.7.0\n\n### New Features\n\n- Optional description argument to consumes and produces decorator implemented ([#338](https://github.com/airtai/fastkafka/pull/338)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumes and produces decorators now have optional `description` argument that is used instead of function docstring in async doc generation when specified\n\n- FastKafka Windows OS support enabled ([#326](https://github.com/airtai/fastkafka/pull/326)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now run on Windows\n\n- FastKafka and FastAPI integration implemented 
([#304](https://github.com/airtai/fastkafka/pull/304)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now be run alongside FastAPI\n\n- Batch consuming option to consumers implemented ([#298](https://github.com/airtai/fastkafka/pull/298)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumers can consume events in batches by specifying msg type of consuming function as `List[YourMsgType]` \n\n- Removed support for synchronous produce functions ([#295](https://github.com/airtai/fastkafka/pull/295)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Added default broker values and update docs ([#292](https://github.com/airtai/fastkafka/pull/292)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n### Bugs Squashed\n\n- Fix index.ipynb to be runnable in colab ([#342](https://github.com/airtai/fastkafka/issues/342))\n\n- Use cli option root_path docs generate and serve CLI commands ([#341](https://github.com/airtai/fastkafka/pull/341)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Fix incorrect asyncapi docs path on fastkafka docs serve command ([#335](https://github.com/airtai/fastkafka/pull/335)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Serve docs now takes app `root_path` argument into consideration when specified in app\n\n- Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps ([#315](https://github.com/airtai/fastkafka/issues/315))\n\n- Fix logs printing timestamps ([#308](https://github.com/airtai/fastkafka/issues/308))\n\n- Fix topics with dots causing failure of tester instantiation ([#306](https://github.com/airtai/fastkafka/pull/306)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Specified topics can now have \".\" in their names\n\n## 0.6.0\n\n### New Features\n\n- Timestamps added to CLI commands ([#283](https://github.com/airtai/fastkafka/pull/283)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Added option to 
process messages concurrently ([#278](https://github.com/airtai/fastkafka/pull/278)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - A new `executor` option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.\n\n- Add consumes and produces functions to app ([#274](https://github.com/airtai/fastkafka/pull/274)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Add batching for producers ([#273](https://github.com/airtai/fastkafka/pull/273)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(batch): batch support is a real need! and i see it on the issue list.... so hope we do not need to wait too long\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Fix broken links in guides ([#272](https://github.com/airtai/fastkafka/pull/272)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Generate the docusaurus sidebar dynamically by parsing summary.md ([#270](https://github.com/airtai/fastkafka/pull/270)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Metadata passed to consumer ([#269](https://github.com/airtai/fastkafka/pull/269)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(key): read the key value somehow..Maybe I missed something in the docs\n    requirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD operations.\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Contribution with instructions how to build and test added ([#255](https://github.com/airtai/fastkafka/pull/255)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Export encoders, decoders from fastkafka.encoder ([#246](https://github.com/airtai/fastkafka/pull/246)), thanks to 
[@kumaranvpl](https://github.com/kumaranvpl)\n\n\n- Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. ([#239](https://github.com/airtai/fastkafka/issues/239))\n\n\n- UI Improvement: Post screenshots with links to the actual messages in testimonials section ([#228](https://github.com/airtai/fastkafka/issues/228))\n\n### Bugs Squashed\n\n- Batch testing fix ([#280](https://github.com/airtai/fastkafka/pull/280)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Tester breaks when using Batching or KafkaEvent producers ([#279](https://github.com/airtai/fastkafka/issues/279))\n\n- Consumer loop callbacks are not executing in parallel ([#276](https://github.com/airtai/fastkafka/issues/276))\n\n\n## 0.5.0\n\n### New Features\n\n- Significant speedup of Kafka producer ([#236](https://github.com/airtai/fastkafka/pull/236)), thanks to [@Sternakt](https://github.com/Sternakt)\n \n\n- Added support for AVRO encoding/decoding ([#231](https://github.com/airtai/fastkafka/pull/231)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n### Bugs Squashed\n\n- Fixed sidebar to include guides in docusaurus documentation ([#238](https://github.com/airtai/fastkafka/pull/238)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Fixed link to symbols in docusaurus docs ([#227](https://github.com/airtai/fastkafka/pull/227)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Removed bootstrap servers from constructor ([#220](https://github.com/airtai/fastkafka/pull/220)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n## 0.4.0\n\n### New Features\n\n- Integrate FastKafka chat ([#208](https://github.com/airtai/fastkafka/pull/208)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add benchmarking ([#206](https://github.com/airtai/fastkafka/pull/206)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Enable fast testing without running kafka locally 
([#198](https://github.com/airtai/fastkafka/pull/198)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Generate docs using Docusaurus ([#194](https://github.com/airtai/fastkafka/pull/194)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add test cases for LocalRedpandaBroker ([#189](https://github.com/airtai/fastkafka/pull/189)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Reimplement patch and delegates from fastcore ([#188](https://github.com/airtai/fastkafka/pull/188)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Rename existing functions into start and stop and add lifespan handler ([#117](https://github.com/airtai/fastkafka/issues/117))\n  - https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios\n\n\n## 0.3.1\n\n-  README.md file updated\n\n\n## 0.3.0\n\n### New Features\n\n- Guide for FastKafka produces using partition key ([#172](https://github.com/airtai/fastkafka/pull/172)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #161\n\n- Add support for Redpanda for testing and deployment ([#181](https://github.com/airtai/fastkafka/pull/181)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Remove bootstrap_servers from __init__ and use the name of broker as an option when running/testing ([#134](https://github.com/airtai/fastkafka/issues/134))\n\n- Add a GH action file to check for broken links in the docs ([#163](https://github.com/airtai/fastkafka/issues/163))\n\n- Optimize requirements for testing and docs ([#151](https://github.com/airtai/fastkafka/issues/151))\n\n- Break requirements into base and optional for testing and dev ([#124](https://github.com/airtai/fastkafka/issues/124))\n  - Minimize base requirements needed just for running the service.\n\n- Add link to example git repo into guide for building docs using actions ([#81](https://github.com/airtai/fastkafka/issues/81))\n\n- Add logging for 
run_in_background ([#46](https://github.com/airtai/fastkafka/issues/46))\n\n- Implement partition Key mechanism for producers ([#16](https://github.com/airtai/fastkafka/issues/16))\n\n### Bugs Squashed\n\n- Implement checks for npm installation and version ([#176](https://github.com/airtai/fastkafka/pull/176)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #158 by checking if the npx is installed and more verbose error handling\n\n- Fix the helper.py link in CHANGELOG.md ([#165](https://github.com/airtai/fastkafka/issues/165))\n\n- fastkafka docs install_deps fails ([#157](https://github.com/airtai/fastkafka/issues/157))\n  - Unexpected internal error: [Errno 2] No such file or directory: 'npx'\n\n- Broken links in docs ([#141](https://github.com/airtai/fastkafka/issues/141))\n\n- fastkafka run is not showing up in CLI docs ([#132](https://github.com/airtai/fastkafka/issues/132))\n\n\n## 0.2.3\n\n- Fixed broken links on PyPi index page\n\n\n## 0.2.2\n\n### New Features\n\n- Extract JDK and Kafka installation out of LocalKafkaBroker ([#131](https://github.com/airtai/fastkafka/issues/131))\n\n- PyYAML version relaxed ([#119](https://github.com/airtai/fastkafka/pull/119)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Replace docker based kafka with local ([#68](https://github.com/airtai/fastkafka/issues/68))\n  - [x] replace docker compose with a simple docker run (standard run_jupyter.sh should do)\n  - [x] replace all tests to use LocalKafkaBroker\n  - [x] update documentation\n\n### Bugs Squashed\n\n- Fix broken link for FastKafka docs in index notebook ([#145](https://github.com/airtai/fastkafka/issues/145))\n\n- Fix encoding issues when loading setup.py on windows OS ([#135](https://github.com/airtai/fastkafka/issues/135))\n\n\n## 0.2.0\n\n### New Features\n\n- Replace kafka container with LocalKafkaBroker ([#112](https://github.com/airtai/fastkafka/issues/112))\n  - - [x] Replace kafka container with LocalKafkaBroker in tests\n- 
[x] Remove kafka container from tests environment\n- [x] Fix failing tests\n\n### Bugs Squashed\n\n- Fix random failing in CI ([#109](https://github.com/airtai/fastkafka/issues/109))\n\n\n## 0.1.3\n\n- version update in __init__.py\n\n\n## 0.1.2\n\n### New Features\n\n\n- Git workflow action for publishing Kafka docs ([#78](https://github.com/airtai/fastkafka/issues/78))\n\n\n### Bugs Squashed\n\n- Include missing requirement ([#110](https://github.com/airtai/fastkafka/issues/110))\n  - [x] Typer is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py) but it is not included in [settings.ini](https://github.com/airtai/fastkafka/blob/main/settings.ini)\n  - [x] Add aiohttp which is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py)\n  - [x] Add nbformat which is imported in _components/helpers.py\n  - [x] Add nbconvert which is imported in _components/helpers.py\n\n\n## 0.1.1\n\n\n### Bugs Squashed\n\n- JDK install fails on Python 3.8 ([#106](https://github.com/airtai/fastkafka/issues/106))\n\n\n\n## 0.1.0\n\nInitial release\n"
  },
  {
    "path": "CNAME",
    "content": "fastkafka.airt.ai\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing to FastKafka\n\nFirst off, thanks for taking the time to contribute! ❤️\n\nAll types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉\n\n> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:\n> - Star the project\n> - Tweet about it\n> - Refer this project in your project's readme\n> - Mention the project at local meetups and tell your friends/colleagues\n\n## Table of Contents\n\n- [I Have a Question](#i-have-a-question)\n- [I Want To Contribute](#i-want-to-contribute)\n  - [Reporting Bugs](#reporting-bugs)\n  - [Suggesting Enhancements](#suggesting-enhancements)\n  - [Your First Code Contribution](#your-first-code-contribution)\n- [Development](#development)\n    - [Prepare the dev environment](#prepare-the-dev-environment)\n    - [Way of working](#way-of-working)\n    - [Before a PR](#before-a-pr)\n\n\n\n## I Have a Question\n\n> If you want to ask a question, we assume that you have read the available [Documentation](https://fastkafka.airt.ai/docs).\n\nBefore you ask a question, it is best to search for existing [Issues](https://github.com/airtai/fastkafka/issues) that might help you. 
In case you have found a suitable issue and still need clarification, you can write your question in this issue.\n\nIf you then still feel the need to ask a question and need clarification, we recommend the following:\n\n- Contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new)\n    - Provide as much context as you can about what you're running into\n\nWe will then take care of the issue as soon as possible.\n\n## I Want To Contribute\n\n> ### Legal Notice \n> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.\n\n### Reporting Bugs\n\n#### Before Submitting a Bug Report\n\nA good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.\n\n- Make sure that you are using the latest version.\n- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](https://fastkafka.airt.ai/docs). 
If you are looking for support, you might want to check [this section](#i-have-a-question)).\n- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/airtai/fastkafka/issues?q=label%3Abug).\n- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.\n- Collect information about the bug:\n  - Stack trace (Traceback)\n  - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)\n  - Python version\n  - Possibly your input and the output\n  - Can you reliably reproduce the issue? And can you also reproduce it with older versions?\n\n#### How Do I Submit a Good Bug Report?\n\nWe use GitHub issues to track bugs and errors. If you run into an issue with the project:\n\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)\n- Explain the behavior you would expect and the actual behavior.\n- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.\n- Provide the information you collected in the previous section.\n\nOnce it's filed:\n\n- The project team will label the issue accordingly.\n- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. 
Bugs with the `needs-repro` tag will not be addressed until they are reproduced.\n- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented.\n\n### Suggesting Enhancements\n\nThis section guides you through submitting an enhancement suggestion for FastKafka, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.\n\n#### Before Submitting an Enhancement\n\n- Make sure that you are using the latest version.\n- Read the [documentation](https://fastkafka.airt.ai/docs) carefully and find out if the functionality is already covered, maybe by an individual configuration.\n- Perform a [search](https://github.com/airtai/fastkafka/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.\n- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library.\n- If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n\n#### How Do I Submit a Good Enhancement Suggestion?\n\nEnhancement suggestions are tracked as [GitHub issues](https://github.com/airtai/fastkafka/issues).\n\n- Use a **clear and descriptive title** for the issue to identify the suggestion.\n- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.\n- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.\n- **Explain why this enhancement would be useful** to most FastKafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.\n\n### Your First Code Contribution\n\nA great way to start contributing to FastKafka would be by solving an issue tagged with \"good first issue\". To find a list of issues that are tagged as \"good first issue\" and are suitable for newcomers, please visit the following link: [Good first issues](https://github.com/airtai/fastkafka/labels/good%20first%20issue)\n\nThese issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in [Way of working](#way-of-working) and [Before a PR](#before-a-pr), and help us make FastKafka even better!\n\nIf you have any questions or need further assistance, feel free to reach out to us. 
Happy coding!\n\n## Development\n\n### Prepare the dev environment\n\nTo start contributing to FastKafka, you first have to prepare the development environment.\n\n#### Clone the FastKafka repository\n\nTo clone the repository, run the following command in the CLI:\n\n```shell\ngit clone https://github.com/airtai/fastkafka.git\n```\n\n#### Optional: create a virtual python environment\n\nTo prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your FastKafka project by running:\n\n```shell\npython3 -m venv fastkafka-env\n```\n\nAnd to activate your virtual environment run:\n\n```shell\nsource fastkafka-env/bin/activate\n```\n\nTo learn more about virtual environments, please have a look at [official python documentation](https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available.)\n\n#### Install FastKafka\n\nTo install FastKafka, navigate to the root directory of the cloned FastKafka project and run:\n\n```shell\npip install -e \".[dev]\"\n```\n\n#### Install JRE and Kafka toolkit\n\nTo be able to run tests and use all the functionalities of FastKafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:\n\n1. Use our `fastkafka testing install-deps` CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR\n2. Install JRE and Kafka manually.\n   To do this, please refer to [JDK and JRE installation guide](https://docs.oracle.com/javase/9/install/toc.htm) and [Apache Kafka quickstart](https://kafka.apache.org/quickstart)\n   \n#### Install npm\n\nTo be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:\n\n1. Use our `fastkafka docs install_deps` CLI command which will install npm for you in your .local folder\nOR\n2. 
Install npm manually.\n   To do this, please refer to [NPM installation guide](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)\n   \n#### Install docusaurus\n\nTo generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of FastKafka project.\n\n#### Check if everything works\n\nAfter installing FastKafka and all the necessary dependencies, run `nbdev_test` in the root of FastKafka project. This will take a couple of minutes as it will run all the tests on FastKafka project. If everything is set up correctly, you will get a \"Success.\" message in your terminal, otherwise please refer to previous steps.\n\n### Way of working\n\nThe development of FastKafka is done in Jupyter notebooks. Inside the `nbs` directory you will find all the source code of FastKafka, this is where you will implement your changes.\n\nThe testing, cleanup and exporting of the code is being handled by `nbdev`, please, before starting the work on FastKafka, get familiar with it by reading [nbdev documentation](https://nbdev.fast.ai/getting_started.html).\n\nThe general philosophy you should follow when writing code for FastKafka is:\n\n- Function should be an atomic functionality, short and concise\n   - Good rule of thumb: your function should be 5-10 lines long usually\n- If there are more than 2 params, enforce keywording using *\n   - E.g.: `def function(param1, *, param2, param3): ...`\n- Define typing of arguments and return value\n   - If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected\n- After the function cell, write test cells using the assert keyword\n   - Whenever you implement something you should test that functionality immediately in the cells below \n- Add Google style python docstrings when function is implemented and tested\n\n### Before a PR\n\nAfter you have implemented your changes you will want to open a pull request to merge those changes into our main 
branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of FastKafka project):\n\n1. Format your notebooks: `nbqa black nbs`\n2. Close, shutdown, and clean the metadata from your notebooks: `nbdev_clean`\n3. Export your code: `nbdev_export`\n4. Run the tests: `nbdev_test`\n5. Test code typing: `mypy fastkafka`\n6. Test code safety with bandit: `bandit -r fastkafka`\n7. Test code safety with semgrep: `semgrep --config auto -r fastkafka`\n\nWhen you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.\n\n## Attribution\nThis guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "include settings.ini\ninclude LICENSE\ninclude CONTRIBUTING.md\ninclude README.md\nrecursive-exclude * __pycache__\n"
  },
  {
    "path": "README.md",
    "content": "# FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n<b>Effortless Kafka integration for your web services</b>\n\n## Deprecation notice\n\nThis project is superseded by\n[FastStream](https://github.com/airtai/faststream).\n\nFastStream is a new package based on the ideas and experiences gained\nfrom [FastKafka](https://github.com/airtai/fastkafka) and\n[Propan](https://github.com/lancetnik/propan). By joining our forces, we\npicked up the best from both packages and created the unified way to\nwrite services capable of processing streamed data regardless of the\nunderlying protocol.\n\nWe’ll continue to maintain FastKafka package, but new development will\nbe in [FastStream](https://github.com/airtai/faststream). If you are\nstarting a new service,\n[FastStream](https://github.com/airtai/faststream) is the recommended\nway to do it.\n\n------------------------------------------------------------------------\n\n![PyPI](https://img.shields.io/pypi/v/fastkafka.png) ![PyPI -\nDownloads](https://img.shields.io/pypi/dm/fastkafka.png) ![PyPI - Python\nVersion](https://img.shields.io/pypi/pyversions/fastkafka.png)\n\n![GitHub Workflow\nStatus](https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml)\n![CodeQL](https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg)\n![Dependency\nReview](https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg)\n\n![GitHub](https://img.shields.io/github/license/airtai/fastkafka.png)\n\n------------------------------------------------------------------------\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n------------------------------------------------------------------------\n\n#### ⭐⭐⭐ Stay in touch ⭐⭐⭐\n\nPlease show your support and stay in touch by:\n\n- giving our [GitHub repository](https://github.com/airtai/fastkafka/) a\n  star, and\n\n- joining our [Discord server](https://discord.gg/CJWmYpyFbc).\n\nYour support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. Thank you for your\nsupport!\n\n------------------------------------------------------------------------\n\n#### 🐝🐝🐝 We were busy lately 🐝🐝🐝\n\n![Activity](https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg \"Repobeats analytics image\")\n\n## Install\n\nFastKafka works on Windows, macOS, Linux, and most Unix-style operating\nsystems. 
You can install base version of FastKafka with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\nTo install FastKafka with testing features please use:\n\n``` sh\npip install fastkafka[test]\n```\n\nTo install FastKafka with asyncapi docs please use:\n\n``` sh\npip install fastkafka[docs]\n```\n\nTo install FastKafka with all the features please use:\n\n``` sh\npip install fastkafka[test,docs]\n```\n\n## Tutorial\n\nYou can start an interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb\" target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open in Colab\" />\n</a>\n\n## Writing server code\n\nTo demonstrate FastKafka simplicity of using `@produces` and `@consumes`\ndecorators, we will focus on a simple app.\n\nThe app will consume JSON messages containing positive floats from one topic, log\nthem, and then produce incremented values to another topic.\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines one `Data` message class. 
This class will model the\nconsumed and produced data in our app demo, it contains one\n`NonNegativeFloat` field `data` that will be logged and “processed”\nbefore being produced to another topic.\n\nThis message class will be used to parse and validate incoming data in\nKafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka brokers.\n\nNext, an object of the\n[`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\nWe will also import and create a logger so that we can log the incoming\ndata in our consuming function.\n\n``` python\nfrom logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger(\"Demo Kafka app\")\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n```\n\n### Function 
decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThe following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data” Kafka topic. The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `Data` message class. Specifying the type of the\n  single argument is instructing the Pydantic to use `Data.parse_raw()`\n  on the consumed message before passing it to the user defined function\n  `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_output_data` function,\n  which specifies that this function should produce a message to the\n  “output_data” Kafka topic whenever it is called. The `to_output_data`\n  function takes a single float argument `data`. It increments the\n  data and returns it wrapped in a `Data` object. 
The framework will call\n  the `Data.json().encode(\"utf-8\")` function on the returned value and\n  produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\ninstances which internally starts InMemory implementation of Kafka\nbroker.\n\nThe Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies.\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = Data(\n    data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send Data message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with incremented data in output_data topic\n    await tester.awaited_mocks.on_output_data.assert_awaited_with(\n        Data(data=1.1), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] 
fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] Demo Kafka app: Got data: 0.1\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() 
called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created a simple FastKafka application. The app will consume the\n`Data` from the `input_data` topic, log it and produce the incremented\ndata to `output_data` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent Data message to `input_data` topic\n\n4.  
Asserted and checked that the developed service has reacted to Data\n    message\n\n## Running the service\n\nThe service can be started using builtin fastkafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\nTo run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp symbol to the command.\n\n``` sh\nfastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n```\n\nAfter running the command, you should see the following output in your\ncommand line:\n\n    [1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting 
bootstrap_servers value to 'localhost:9092'\n    [1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1506]: 23-05-31 11:36:46.042 [INFO] 
aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n    [1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n    [1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n    23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nAsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n    23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\nThis will generate the *asyncapi* folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxr-xr-x 4 root root 4096 May 31 11:38 docs\n    drwxr-xr-x 2 root root 4096 May 31 11:38 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET / HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/global.min.css HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /js/asyncapi-ui.min.js HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/asyncapi.min.css HTTP/1.1\" 200 -\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation bellow:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    
kafka_brokers=kafka_brokers,\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet are for the consumer as specified in\nthe code above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippet are for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages as defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka\\_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n\n## License\n\nFastKafka is licensed under the Apache License 2.0\n\nA permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code.\n\nThe full text of the license can be found\n[here](https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE).\n"
  },
  {
    "path": "docker/.semgrepignore",
    "content": "dev.yml\n\n"
  },
  {
    "path": "docker/dev.yml",
    "content": "version: \"3\"\nservices:\n    fastkafka-devel:  #nosemgrep\n        image: ghcr.io/airtai/nbdev-mkdocs\n        hostname: $DOCKER_COMPOSE_PROJECT-devel\n        container_name: $DOCKER_COMPOSE_PROJECT-devel\n        ports:\n            - \"${PORT_PREFIX}8888:8888\"\n            - \"${PORT_PREFIX}4000:4000\"\n            - \"${PORT_PREFIX}6006:6006\"\n        volumes:\n            - $AIRT_PROJECT:/work/fastkafka\n            - /etc/passwd:/etc/passwd\n            - /etc/group:/etc/group\n            - /etc/shadow:/etc/shadow\n            - $HOME/.ssh:$HOME/.ssh\n            - $HOME/.gitconfig:/root/.gitconfig\n        environment:\n            USER: $USER\n            USERNAME: $USERNAME\n            PRESERVE_ENVS: $PRESERVE_ENVS\n            OPENAI_API_KEY: $OPENAI_API_KEY\n"
  },
  {
    "path": "docusaurus/babel.config.js",
    "content": "module.exports = {\n  presets: [require.resolve('@docusaurus/core/lib/babel/preset')],\n};\n"
  },
  {
    "path": "docusaurus/docusaurus.config.js",
    "content": "// @ts-check\n// Note: type annotations allow type checking and IDEs autocompletion\n\nconst lightCodeTheme = require('prism-react-renderer/themes/github');\nconst darkCodeTheme = require('prism-react-renderer/themes/dracula');\n\nmodule.exports = async function configCreatorAsync() {\n  /** @type {import('@docusaurus/types').Config} */\n  const config = {\n    title: 'FastKafka',\n    tagline: 'Effortless Kafka integration for web services',\n    customFields: {\n      description:\n        'Powerful and easy-to-use open-source framework for building asynchronous web services that interact with Kafka.',\n    },\n    favicon: 'img/AIRT_icon_blue.svg',\n\n    // Set the production url of your site here\n    url: 'https://fastkafka.airt.ai/',\n    // Set the /<baseUrl>/ pathname under which your site is served\n    // For GitHub pages deployment, it is often '/<projectName>/'\n    baseUrl: '/',\n\n    // GitHub pages deployment config.\n    // If you aren't using GitHub pages, you don't need these.\n    organizationName: 'airt', // Usually your GitHub org/user name.\n    projectName: 'fastkafka', // Usually your repo name.\n    trailingSlash: true,\n    onBrokenLinks: 'warn',\n    onBrokenMarkdownLinks: 'warn',\n\n    // Even if you don't use internalization, you can use this field to set useful\n    // metadata like html lang. 
For example, if your site is Chinese, you may want\n    // to replace \"en\" with \"zh-Hans\".\n    i18n: {\n      defaultLocale: 'en',\n      locales: ['en'],\n    },\n\n    presets: [\n      [\n        'classic',\n        /** @type {import('@docusaurus/preset-classic').Options} */\n        ({\n          docs: {\n            sidebarPath: require.resolve('./sidebars.js'),\n            // Please change this to your repo.\n            // Remove this to remove the \"edit this page\" links.\n  //           editUrl:\n  //             'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',\n            exclude: [\n              // '**/_*.{js,jsx,ts,tsx,md,mdx}',\n              // '**/_*/**',\n              '**/*.test.{js,jsx,ts,tsx}',\n              '**/__tests__/**',\n            ],\n            versions: {\n              current: {\n                label: `dev 🚧`,\n              },\n            },\n          },\n          blog: {\n            showReadingTime: true,\n            // Please change this to your repo.\n            // Remove this to remove the \"edit this page\" links.\n  //           editUrl:\n  //             'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',\n          },\n          theme: {\n            customCss: require.resolve('./src/css/custom.css'),\n          },\n          gtag: {\n            trackingID: 'G-WLMWPELHMB',\n          },\n        }),\n      ],\n    ],\n\n    themeConfig:\n      /** @type {import('@docusaurus/preset-classic').ThemeConfig} */\n      ({\n        algolia: {\n          appId: 'EHYNSIUGMY',\n          // Public API key: it is safe to commit it\n          // nosemgrep\n          apiKey: '2680cd13947844a00a5a657b959e6211',\n          indexName: 'fastkafka-airt',\n        },\n        // Replace with your project's social card\n        image: 'https://opengraph.githubassets.com/1671805243.560327/airtai/fastkafka',\n        // colorMode: {\n   
     //   disableSwitch: true,\n        // },\n        navbar: {\n          title: 'airt',\n          logo: {\n            alt: 'airt logo',\n            src: 'img/AIRT_icon_blue.svg',\n            href: 'https://airt.ai',\n            target: '_blank'\n          },\n          items: [\n            {to: '/', html: '<div><img src=\"/img/home-icon.svg\"><p>FastKafka</p></div>', position: 'right', className: 'fastkafka-home'},\n            {\n              type: 'docsVersionDropdown',\n              position: 'right',\n              dropdownActiveClassDisabled: true,\n              // dropdownItemsAfter: [{to: '/versions', label: 'All versions'}],\n            },\n            {\n              type: 'docSidebar',\n              sidebarId: 'tutorialSidebar',\n              position: 'right',\n              label: 'Docs',\n            },\n  //           {to: '/blog', label: 'Blog', position: 'left'},\n            {\n              type: 'html',\n              position: 'right',\n              className: 'github-stars',\n              value: '<iframe src=\"https://ghbtns.com/github-btn.html?user=airtai&repo=fastkafka&type=star&count=true&size=large\" frameborder=\"0\" scrolling=\"0\" width=\"170\" height=\"30\" title=\"GitHub\"></iframe>',\n            },\n            {\n              href: 'https://discord.gg/CJWmYpyFbc',\n              position: 'right',\n              className: \"header-discord-link\",\n              \"aria-label\": \"Discord Link\",\n            },\n            {to: '/', html: '<div><img src=\"/img/home-icon.svg\"></div>', position: 'right', className: 'fastkafka-home-mobile'},\n          ],\n        },\n        footer: {\n          style: 'dark',\n          links: [\n            {\n              title: 'COMMUNITY',\n              items: [\n                {\n                  html: `\n                      <a class=\"footer-discord-link\" href=\"https://discord.gg/CJWmYpyFbc\" target=\"_blank\" rel=\"noreferrer noopener\" aria-label=\"Discord 
link\"></a>\n                    `,\n                },\n                {\n                  html: `\n                      <a class=\"footer-github-link\" href=\"https://github.com/airtai\" target=\"_blank\" rel=\"noreferrer noopener\" aria-label=\"Github link\"></a>\n                    `,\n                },\n                {\n                  html: `\n                      <a class=\"footer-twitter-link\" href=\"https://twitter.com/airt_AI\" target=\"_blank\" rel=\"noreferrer noopener\" aria-label=\"Twitter link\"></a>\n                    `,\n                },\n                {\n                  html: `\n                      <a class=\"footer-facebook-link\" href=\"https://www.facebook.com/airt.ai.api/\" target=\"_blank\" rel=\"noreferrer noopener\" aria-label=\"Facebook link\"></a>\n                    `,\n                },\n                {\n                  html: `\n                      <a class=\"footer-linkedin-link\" href=\"https://www.linkedin.com/company/airt-ai/\" target=\"_blank\" rel=\"noreferrer noopener\" aria-label=\"LinkedIn link\"></a>\n                    `,\n                },\n              ],\n            },\n            {\n              title: 'EXPLORE DOCS',\n              items: [\n                {\n                  label: 'Get Started',\n                  to: '/docs',\n                },\n              ],\n            },\n            {\n              title: 'EXPLORE MORE',\n              items: [\n                {\n                  label: 'News',\n                  to: 'https://airt.ai/news',\n                },\n                {\n                  label: 'About Us',\n                  to: 'https://airt.ai/about-us',\n                },\n                {\n                  label: 'Company information',\n                  to: 'https://airt.ai/company-information',\n                },\n                // {\n                //   label: 'Contact',\n                //   to: 'contact',\n                // },\n                
\n              ],\n            },\n          ],\n          copyright: `© 2023 airt. All rights reserved.`,\n        },\n        // prism: {\n        //   theme: lightCodeTheme,\n        //   darkTheme: darkCodeTheme,\n        // },\n        prism: {\n          theme: ( await import('./src/utils/prismLight.mjs')).default,\n          darkTheme: ( await import('./src/utils/prismDark.mjs')).default,\n        },\n      }),\n  };\n  return config\n};\n"
  },
  {
    "path": "docusaurus/package.json",
    "content": "{\n  \"name\": \"fastkafka\",\n  \"version\": \"0.0.0\",\n  \"private\": true,\n  \"scripts\": {\n    \"docusaurus\": \"docusaurus\",\n    \"start\": \"docusaurus start --host 0.0.0.0 --port 4000\",\n    \"build\": \"docusaurus build\",\n    \"swizzle\": \"docusaurus swizzle\",\n    \"deploy\": \"docusaurus deploy\",\n    \"clear\": \"docusaurus clear\",\n    \"serve\": \"docusaurus serve  --host 0.0.0.0 --port 4000\",\n    \"write-translations\": \"docusaurus write-translations\",\n    \"write-heading-ids\": \"docusaurus write-heading-ids\"\n  },\n  \"dependencies\": {\n    \"@docusaurus/core\": \"2.4.0\",\n    \"@docusaurus/preset-classic\": \"2.4.0\",\n    \"@mdx-js/react\": \"^1.6.22\",\n    \"clsx\": \"^1.2.1\",\n    \"prism-react-renderer\": \"^1.3.5\",\n    \"react\": \"^17.0.2\",\n    \"react-accessible-accordion\": \"^5.0.0\",\n    \"react-dom\": \"^17.0.2\",\n    \"react-iframe\": \"^1.8.5\",\n    \"react-youtube\": \"^10.1.0\"\n  },\n  \"devDependencies\": {\n    \"@docusaurus/module-type-aliases\": \"2.4.0\"\n  },\n  \"browserslist\": {\n    \"production\": [\n      \">0.5%\",\n      \"not dead\",\n      \"not op_mini all\"\n    ],\n    \"development\": [\n      \"last 1 chrome version\",\n      \"last 1 firefox version\",\n      \"last 1 safari version\"\n    ]\n  },\n  \"engines\": {\n    \"node\": \">=16.14\"\n  }\n}\n"
  },
  {
    "path": "docusaurus/scripts/build_docusaurus_docs.sh",
    "content": "#!/bin/bash\n\n# exit when any command fails\nset -e\n\necho \"Cleanup existing build artifacts\"\nrm -rf docusaurus/docs\n\necho \"Runing nbdev_mkdocs docs\"\nmkdir -p mkdocs/docs\ncp LICENSE mkdocs/docs/LICENSE.md\ncp CONTRIBUTING.md mkdocs/docs\nnbdev_mkdocs docs\n\necho \"Copying newly generated markdown files to docusaurus directory\"\ncp -r mkdocs/docs docusaurus/\n\necho \"Generating sidebars.js\"\npython3 -c \"from fastkafka._docusaurus_helper import generate_sidebar; generate_sidebar('./docusaurus/docs/SUMMARY.md', './docusaurus/sidebars.js')\"\n\necho \"Deleting the markdown files from the docs directory that are not present in the sidebar.\"\npython3 -c \"from fastkafka._docusaurus_helper import delete_unused_markdown_files_from_sidebar; delete_unused_markdown_files_from_sidebar('./docusaurus/docs', './docusaurus/sidebars.js')\"\n\necho \"Generating API docs\"\npython3 -c \"from fastkafka._docusaurus_helper import fix_invalid_syntax_in_markdown, generate_markdown_docs; fix_invalid_syntax_in_markdown('./docusaurus/docs'); generate_markdown_docs('fastkafka', './docusaurus/docs')\"\n\necho \"Runing docusaurus build\"\ncd docusaurus && npm run build\n\necho \"Checking and creating new document version...\"\nsettings_file=\"../settings.ini\"\ndocs_versioning_flag=$( { grep '^docs_versioning[[:space:]]*=' \"$settings_file\" || [[ $? == 1 ]]; } | awk -F = '{print $2}' | xargs)\n\nif [ \"$docs_versioning_flag\" == \"minor\" ]; then\n    echo \"Error: minor versioning is not supported when using Docusaurus static site generator. Use patch to create new document version or None to disable document versioning.\" >&2\n    exit 1\nfi\n\nif [ -z \"$docs_versioning_flag\" ]; then\n    docs_versioning_flag=\"None\"\nfi\n\nif [ \"$docs_versioning_flag\" != \"patch\" ] && [ \"$docs_versioning_flag\" != \"None\" ]; then\n    echo \"Error: Invalid value set for 'docs_versioning' in settings.ini file: $docs_versioning_flag. 
Allowed values are patch or None.\" >&2\n    exit 1\nfi\n\ndocs_version_file=\"versions.json\"\nif [ \"$docs_versioning_flag\" == \"patch\" ]; then\n    echo \"Document versioning is enabled.\"\n    lib_version=$(grep '^version[[:space:]]*=' \"$settings_file\" | awk -F = '{print $2}' | xargs)\n    pat=\"^[0-9]+([.][0-9]+)*$\"\n    if [[ $lib_version =~ $pat ]]; then\n        if [ -f \"$docs_version_file\" ]; then\n            if grep -q \"\\\"$lib_version\\\"\" \"$docs_version_file\"; then\n                echo \"Document version already exists: '$lib_version'\"\n            else\n                npm run docusaurus docs:version $lib_version\n            fi\n        else\n            npm run docusaurus docs:version $lib_version\n        fi\n    else\n        echo \"Canary document version updated: '$lib_version'\"\n    fi\nelif [ \"$docs_versioning_flag\" == \"None\" ]; then\n    echo \"Document versioning is disabled.\"\n    if [ -f \"$docs_version_file\" ]; then\n        echo \"Deleting previously created document versions.\"\n        rm -rf versioned_docs versioned_sidebars versions.json\n        echo \"Successfully deleted all previous document versions.\"\n    fi\nfi\n\necho -e \"\\e[36;1m[INFO]\\e[0m Creating a compressed archive of the generated Markdown files. This file is essential for implementing semantic search in the FastFafka-Gen library.\"\ncd ../ && mkdir -p .fastkafka_gen\nfind \"./docusaurus/docs/\" -type f -name \"*.md\" | tar -czvf \".fastkafka_gen/site_md_archive.tar.gz\" -T -\necho -e \"\\e[36;1m[INFO]\\e[0m Markdown files have been successfully compressed and saved in: .fastkafka_gen/site_md_archive.tar.gz\"\n"
  },
  {
    "path": "docusaurus/scripts/install_docusaurus_deps.sh",
    "content": "#!/bin/bash\n\necho \"Install docusaurus dependencies\"\ncd docusaurus && npm install\n"
  },
  {
    "path": "docusaurus/scripts/serve_docusaurus_docs.sh",
    "content": "#!/bin/bash\n\necho \"Serve docusaurus documentation\"\ncd docusaurus && npm run start\n\n"
  },
  {
    "path": "docusaurus/scripts/update_readme.sh",
    "content": "#!/bin/bash\n\n# exit when any command fails\nset -e\n\necho \"Run nbdev_readme and fix symbol links\"\npython3 -c \"from fastkafka._docusaurus_helper import update_readme; update_readme()\"\n"
  },
  {
    "path": "docusaurus/sidebars.js",
    "content": "module.exports = {\ntutorialSidebar: [\n    'index', {'Guides': \n    [{'Writing services': ['guides/Guide_11_Consumes_Basics', 'guides/Guide_12_Batch_Consuming', 'guides/Guide_21_Produces_Basics', 'guides/Guide_22_Partition_Keys', 'guides/Guide_23_Batch_Producing', 'guides/Guide_05_Lifespan_Handler', 'guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka', 'guides/Guide_24_Using_Multiple_Kafka_Clusters']}, {'Testing': ['guides/Guide_33_Using_Tester_class_to_test_fastkafka', 'guides/Guide_31_Using_redpanda_to_test_fastkafka']}, {'Documentation generation': ['guides/Guide_04_Github_Actions_Workflow']}, {'Deployment': ['guides/Guide_30_Using_docker_to_deploy_fastkafka', 'guides/Guide_32_Using_fastapi_to_run_fastkafka_application']}, {'Benchmarking': ['guides/Guide_06_Benchmarking_FastKafka']}]},{'API': ['api/fastkafka/EventMetadata', 'api/fastkafka/FastKafka', 'api/fastkafka/KafkaEvent', {'encoder': ['api/fastkafka/encoder/AvroBase', 'api/fastkafka/encoder/avro_decoder', 'api/fastkafka/encoder/avro_encoder', 'api/fastkafka/encoder/avsc_to_pydantic', 'api/fastkafka/encoder/json_decoder', 'api/fastkafka/encoder/json_encoder']}, {'executors': ['api/fastkafka/executors/DynamicTaskExecutor', 'api/fastkafka/executors/SequentialExecutor']}, {'testing': ['api/fastkafka/testing/ApacheKafkaBroker', 'api/fastkafka/testing/LocalRedpandaBroker', 'api/fastkafka/testing/Tester']}]},{'CLI': ['cli/fastkafka', 'cli/run_fastkafka_server_process']},\n    \"LICENSE\",\n    \"CONTRIBUTING\",\n    \"CHANGELOG\",\n],\n};"
  },
  {
    "path": "docusaurus/src/components/BrowserWindow/index.js",
    "content": "/**\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nimport React from 'react';\nimport clsx from 'clsx';\n\nimport styles from './styles.module.css';\n\nexport default function BrowserWindow({\n  children,\n  minHeight,\n  url = '',\n  style,\n  bodyStyle,\n}) {\n  return (\n    <div className={styles.browserWindow} style={{...style, minHeight}}>\n      <div className={styles.browserWindowHeader}>\n        <div className={styles.buttons}>\n          <span className={styles.dot} style={{background: '#f25f58'}} />\n          <span className={styles.dot} style={{background: '#fbbe3c'}} />\n          <span className={styles.dot} style={{background: '#58cb42'}} />\n        </div>\n        <div className={clsx(styles.browserWindowAddressBar, 'text--truncate')}>\n          {url}\n        </div>\n        <div className={styles.browserWindowMenuIcon}>\n          <div>\n            <span className={styles.bar} />\n            <span className={styles.bar} />\n            <span className={styles.bar} />\n          </div>\n        </div>\n      </div>\n\n      <div className={styles.browserWindowBody} style={bodyStyle}>\n        {children}\n      </div>\n    </div>\n  );\n}\n\n// Quick and dirty component, to improve later if needed\nexport function IframeWindow({url}) {\n  return (\n    <div style={{padding: 10}}>\n      <BrowserWindow\n        url={url}\n        style={{minWidth: '40vw', maxWidth: 400}}\n        bodyStyle={{padding: 0}}>\n        <iframe src={url} title={url} style={{width: '100%', height: 300}} />\n      </BrowserWindow>\n    </div>\n  );\n}"
  },
  {
    "path": "docusaurus/src/components/BrowserWindow/styles.module.css",
    "content": "/**\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\n.browserWindow {\n  border: 1px solid #fff;\n  border-radius: var(--ifm-global-radius);\n  box-shadow: rgba(0, 0, 0, 0.35) 0px 5px 15px;\n  margin-bottom: var(--ifm-leading);\n}\n\n.browserWindowHeader {\n  align-items: center;\n  background: #ebedf0;\n  display: flex;\n  padding: 0.5rem 1rem;\n}\n\n.row::after {\n  content: \"\";\n  display: table;\n  clear: both;\n}\n\n.buttons {\n  white-space: nowrap;\n}\n\n.right {\n  align-self: center;\n  width: 10%;\n}\n\n[data-theme=\"light\"] {\n  --ifm-background-color: #fff;\n}\n\n.browserWindowAddressBar {\n  flex: 1 0;\n  margin: 0 1rem 0 0.5rem;\n  border-radius: 12.5px;\n  background-color: #fff;\n  color: var(--ifm-color-gray-800);\n  padding: 5px 15px;\n  font: 400 13px Arial, sans-serif;\n  user-select: none;\n  height: 20px;\n}\n\n[data-theme=\"dark\"] .browserWindowAddressBar {\n  color: var(--ifm-color-gray-300);\n}\n\n.dot {\n  margin-right: 6px;\n  margin-top: 4px;\n  height: 12px;\n  width: 12px;\n  background-color: #bbb;\n  border-radius: 50%;\n  display: inline-block;\n}\n\n.browserWindowMenuIcon {\n  margin-left: auto;\n}\n\n.bar {\n  width: 17px;\n  height: 3px;\n  background-color: #aaa;\n  margin: 3px 0;\n  display: block;\n}\n\n.browserWindowBody {\n  background-color: var(--ifm-background-color);\n  border-bottom-left-radius: inherit;\n  border-bottom-right-radius: inherit;\n  padding: 0;\n}\n\n.browserWindowBody > *:last-child {\n  margin-bottom: -8px;\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageCommunity/index.js",
    "content": "import React, { useState, useEffect } from 'react';\nimport clsx from 'clsx';\nimport styles from './styles.module.css';\n\nfunction Testimonial({ testimonialLimitToShow, allTestimonials }) {\n  return (\n    <div className={`${clsx('col col--4')} ${styles.testimonialWrapper}`}>\n      {Object.entries(allTestimonials).map(([key, value]) => {\n        if (key.split(\"_\")[1] <= testimonialLimitToShow) {\n          return (\n            <a\n              key={key}\n              href={value.source.link}\n              target=\"_blank\"\n              rel=\"noopener noreferrer\"\n              className={styles.testimonialAnchor}\n            >\n              <div className={styles.testimonialContainer}>\n                <div className={styles.testimonialHeader}>\n                  <div className={styles.testimonialUserInfo}>\n                    <img src={value.user.profilePic} className={styles.testimonialProfilePic} />\n                    <div>\n                      <h6>{value.user.fullName}</h6>\n                      <p>{value.user.userName}</p>\n                    </div>\n                  </div>\n                  <div>\n                    <img className={styles.testimonialSourceIcon} src={value.source.icon} alt=\"\" />\n                  </div>\n                </div>\n                <div className=\"text--center padding-horiz--md\">\n                  <p className={styles.testimonialDescription}>{value.description}</p>\n                </div>\n              </div>\n            </a>\n          );\n        }\n        return null;\n      })}\n    </div>\n  );\n}\n\n\nconst redditUserProfiles = [\"deadwisdom\", \"benbenbang\", \"Berouald\", \"baggiponte\", \"No-Application5593\", \"code_mc\", \"teajunky\", \"SteamingBeer\", \"BestBottle4517\"];\nconst maxTestimonialSectionToShow = \"4\"\n\nexport default function HomepageCommunity() {\n  const [testimonialLimitToShow, setTestimonialLimitToShow] = useState(\"2\");\n  const [profiles, 
setProfiles] = useState(redditUserProfiles.reduce(\n    (result, username) => ({\n      ...result,\n      [username]: {\n        icon_img: \"https://www.redditstatic.com/avatars/defaults/v2/avatar_default_1.png\",\n        subreddit: {\n          display_name_prefixed: `u/${username}`,\n        },\n      },\n    }),\n    {}\n  ));\n  const testimonials = [\n    {\n      container_1: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/Python/comments/13i0eaz/comment/jk90bwz/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"deadwisdom\"][\"icon_img\"],\n          userName: profiles[\"deadwisdom\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"deadwisdom\",\n        },\n        description: (\n          <>\n            Well well well, if it isn't the library I was already making but better. Very nice.\n\n            What is your long-term vision for supporting this as a company?\n\n            And are you using this now to support real customers or are you expecting this might help you establish a niche?\n          </>\n        ),\n      },\n      container_2: {\n        source: {\n          icon: \"img/twitter-logo.svg\",\n          link: \"https://twitter.com/emaxerrno/status/1635005087721611264?s=20\",\n        },\n        user: {\n          profilePic: \"img/a-alphabet-round-icon.png\",\n          userName: \"Alexander Gallego\",\n          fullName: \"Alexander Gallego\",\n        },\n        description: (\n          <>\n            this is cool. 
let me know if you want to share it w/ the @redpandadata community.\n          </>\n        ),\n      },\n      container_3: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/Python/comments/11paz9u/comment/jbxbbxp/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"BestBottle4517\"][\"icon_img\"].replace(/&amp;/g, '&'),\n          userName: profiles[\"BestBottle4517\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"BestBottle4517\",\n        },\n        description: (\n          <>\n            Very cool indeed. Currently at work we're using RabbitMQ for messaging so this doesn't apply to us (for now), but this type and style of implementation is exactly what I would expect when searching for libs like this. Great job!\n          </>\n        ),\n      },\n      container_4: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/programming/comments/11sjtgm/comment/jceqgml/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"teajunky\"][\"icon_img\"],\n          userName: profiles[\"teajunky\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"teajunky\",\n        },\n        description: (\n          <>\n            Wow, the code in the package is auto-generated from Jupyter-Notebooks\n          </>\n        ),\n      },\n      \n    },\n    {\n      container_1: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/FastAPI/comments/124v5di/comment/jfhg2t2/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"benbenbang\"][\"icon_img\"].replace(/&amp;/g, '&'),\n          userName: profiles[\"benbenbang\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"benbenbang\",\n        },\n   
     description: (\n          <>\n            Nice 👍🏻 I’ve promoted this project in the team! Also, would like to contribute if there’s some kind of roadmap\n          </>\n        ),\n      },\n      container_2: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/Python/comments/11paz9u/comment/jbxf1v8/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"code_mc\"][\"icon_img\"],\n          userName: profiles[\"code_mc\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"code_mc\",\n        },\n        description: (\n          <>\n            I really like the idea of this, as the biggest gripe I have with most pub/sub solutions is all of the tedious boiler plate code needed to correctly subscribe and publish and manage message leases etc. While you often just want to grab a message, do some processing and put it on a different queue.\n          </>\n        ),\n      },\n      container_3: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/FastAPI/comments/11oq09r/comment/jc4dwit/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"No-Application5593\"][\"icon_img\"],\n          userName: profiles[\"No-Application5593\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"No-Application5593\",\n        },\n        description: (\n          <>\n            Wow! This is really great, thank you for your efforts guys. 
This is what I really need for one of my future projects.\n          </>\n        ),\n      },\n      container_4: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/FastAPI/comments/11oq09r/comment/jbx4dfn/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"SteamingBeer\"][\"icon_img\"].replace(/&amp;/g, '&'),\n          userName: profiles[\"SteamingBeer\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"SteamingBeer\",\n        },\n        description: (\n          <>\n            Thank you for your efforts. I see me pitching this library to my team in the near future!\n          </>\n        ),\n      },\n    },\n    {\n      container_1: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/FastAPI/comments/124v5di/comment/jee9vm9/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"Berouald\"][\"icon_img\"],\n          userName: profiles[\"Berouald\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"Berouald\",\n        },\n        description: (\n          <>\n            This is great! I've been thinking about making a similar tool for quite some time, nice job sir! I guess it's to fit your use case, by why stop at Kafka? A paradigm like this would be awesome in the form of a microframework. 
Like a general message consumer framework with pluggable interfaces for Kafka, Rabbitmq, ActiveMQ or even the Redis message broker.\n          </>\n        ),\n      },\n      container_2: {\n        source: {\n          icon: \"img/reddit-logo.png\",\n          link: \"https://www.reddit.com/r/Python/comments/120mt5k/comment/jdpwycr/?utm_source=share&utm_medium=web2x&context=3\",\n        },\n        user: {\n          profilePic: profiles[\"baggiponte\"][\"icon_img\"],\n          userName: profiles[\"baggiponte\"][\"subreddit\"][\"display_name_prefixed\"],\n          fullName: \"baggiponte\",\n        },\n        description: (\n          <>\n            Really hope this project becomes as popular as the OG FastAPI!\n          </>\n        ),\n      },\n      \n      container_3: {\n        source: {\n          icon: \"img/twitter-logo.svg\",\n          link: \"https://twitter.com/perbu/status/1635014207656849408?s=20\",\n        },\n        user: {\n          profilePic: \"img/p-alphabet-round-icon.png\",\n          userName: \"Per Buer\",\n          fullName: \"Per Buer\",\n        },\n        description: (\n          <>\n            I really like how we're getting these more specialized ways to leverage streaming databases, instead of the somewhat intimidating access libraries.\n          </>\n        ),\n      },\n      container_4: {\n        source: {\n          icon: \"img/Y_Combinator_Logo.png\",\n          link: \"https://news.ycombinator.com/item?id=35086594\",\n        },\n        user: {\n          profilePic: \"img/I.svg\",\n          userName: \"iknownothow\",\n          fullName: \"iknownothow\",\n        },\n        description: (\n          <>\n            It looks incredible and I truly hope your project takes off for my sake since I have to work with Kafka from time to time!\n          </>\n        ),\n      },\n    },\n  ];\n\n  const handleLoadMore = () => {\n    setTestimonialLimitToShow(testimonialLimitToShow === \"2\" ? 
\"3\" : Object.keys(testimonials[0]).length);\n  };\n\n  useEffect(() => {\n    async function fetchData() {\n      try {\n        let profilesData = {};\n        for (const profile of redditUserProfiles) {\n          const response = await fetch(`https://www.reddit.com/user/${profile}/about.json`);\n          let data = await response.json();\n          data.data.icon_img = data.data.icon_img.split(\"?\")[0]\n          profilesData[profile] = data.data;\n        }\n        setProfiles(profilesData);\n      } catch (error) {\n        console.error(error);\n      }\n    }\n    fetchData();\n  }, []);\n  return (\n    <section className={`${styles.features}  hero hero--primary`}>\n      <div className=\"container\">\n        <div className={clsx('col col--12')}>\n          <h2 className={styles.title}>The community has spoken!</h2>\n        </div>\n        <div className=\"row\">\n          {testimonials.map((props, idx) => (\n            <Testimonial key={idx} testimonialLimitToShow={testimonialLimitToShow} allTestimonials = {props}  />\n          ))}\n        </div>\n        {testimonialLimitToShow < Object.keys(testimonials[0]).length && (\n          <div className={styles.buttons}>\n            <button className={clsx(\"button button--lg\", styles.heroButton)} onClick={handleLoadMore}>\n                Load More\n            </button>\n          </div>\n        )}\n      </div>\n    </section>\n  );\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageCommunity/styles.module.css",
    "content": ".features {\n  display: flex;\n  align-items: center;\n  padding: 3rem 0;\n  width: 100%;\n  background-color: #60bee4;\n}\n\n.featureSvg {\n  height: 200px;\n  width: 200px;\n}\n.title {\n  font-size: 3rem;\n  text-align: center;\n  padding-bottom: 2rem;\n  color: #fff;\n}\n.subTitle {\n  font-size: 1.5rem;\n  text-align: left;\n}\n.description {\n  font-size: 1rem;\n  text-align: center;\n  margin-top: 3rem;\n}\n.buttons {\n  display: flex;\n  align-items: center;\n  justify-content: center;\n  margin-top: 55px;\n}\n.heroButton {\n  color: #fff;\n  background: var(--ifm-navbar-background-color);\n  border-radius: 25px;\n  padding: 0.7rem 2.5rem 0.7rem 2.5rem;\n  margin-top: -1.5rem;\n  font-size: 1rem;\n}\n.heroButton:hover {\n  background: #3e99c5;\n}\n\n/* .withExtraMargin*/\n/* .testimonialWrapper {\n  margin-top: 60px;\n} */\n\n/* .withExtraMargin a */\n/* .testimonialWrapper a {\n  color: var(--ifm-hero-text-color);\n  display: flex;\n  flex-direction: row;\n  row-gap: 50px;\n} */\n\n.testimonialAnchor {\n  background-color: #fff;\n  padding: 1.6rem;\n  color: #1c1e21; /*var(--ifm-font-color-base); */\n  border-radius: 5px;\n  margin-left: 1rem;\n  margin-top: 1rem;\n  display: block;\n}\n\n.testimonialAnchor:hover {\n  text-decoration: none;\n  color: #1c1e21;\n}\n\n.testimonialWrapper {\n  padding: 0px 0px 3rem 0px;\n  /* margin-top: 10px; */\n}\n\n.testimonialDescription {\n  font-size: 1rem;\n}\n\n.testimonialHeader {\n  display: flex;\n  justify-content: space-between;\n  height: 45px;\n  margin-bottom: 30px;\n}\n.testimonialUserInfo {\n  display: flex;\n}\n\n.testimonialUserInfo h6,\n.testimonialUserInfo p {\n  margin-bottom: 0px;\n  margin-left: 10px;\n}\n\n.testimonialProfilePic {\n  width: 45px;\n  height: auto;\n  border-radius: 50%;\n}\n\n.testimonialSourceIcon {\n  width: 20px;\n  height: auto;\n}\n\n/** Mobile view */\n@media screen and (max-width: 996px) {\n  .testimonialAnchor {\n    margin: 2rem 1rem 0.5rem 1rem;\n  }\n  
.testimonialWrapper {\n    padding: 0 var(--ifm-spacing-horizontal);\n  }\n  .title {\n    font-size: 2rem;\n  }\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageFAQ/index.js",
    "content": "import React from 'react';\nimport clsx from 'clsx';\nimport {\n  Accordion,\n  AccordionItem,\n  AccordionItemHeading,\n  AccordionItemButton,\n  AccordionItemPanel,\n} from 'react-accessible-accordion';\n\nimport styles from './styles.module.css';\nimport 'react-accessible-accordion/dist/fancy-example.css';\n\nconst items = [\n  {\n    \"heading\": \"How much does FastKafka cost?\",\n    \"content\": \"FastKafka is under Apache 2.0 license and free to use.\"\n  },\n  {\n    \"heading\": \"How can I contribute or request features?\",\n    \"content\": \"We love and welcome community contributions! Here is a <a href='https://github.com/airtai/fastkafka/blob/main/CONTRIBUTING.md' target='_blank'>doc</a> to get you started. To request features, add a “Feature request” using the New issue button in GitHub from <a href='https://github.com/airtai/fastkafka/issues' target='_blank'>this link</a>, or join our feature-request <a href='https://discord.gg/CJWmYpyFbc' target='_blank'>Discord channel</a>.\"\n  },\n  {\n    \"heading\": \"Do you support any streaming platforms other than Kafka?\",\n    \"content\": \"Slowly, but surely. We built the initial version for Kafka service and for our needs, but we reached out to the wider community to find out what to do next. We added support for Redpanda, and also got requests for RabbitMQ and Pulsar that went to our backlog and we’ll support them in our future releases.\"\n  },\n  {\n    \"heading\": \"Does FastKafka integrate with AsyncAPI in the way that FastAPi integrates with OpenAPI?\",\n    \"content\": \"Very much the same, but with a small difference due to dependencies of AsyncAPI. You write your code using decorators and you get AsyncAPI specification generated automatically as YAML file. You can convert that file to static HTML file ether by Python API call, CLI or github action. 
AsyncAPI requires Node.js, and you don’t necessarily want this in production.\"\n  },\n  {\n    \"heading\": \"Does it assume that Kafka messages are in JSON format? What if we want to use protobuf, for example?\",\n    \"content\": \"The first implementation we just released uses JSON encoded messages, but we can easily add additional formats/protocols. We’ve created an issue on GitHub and will try to prioritize it for one of the next releases.\"\n  },\n]\n\nexport default function HomepageFAQ() {\n  return (\n    <section className={styles.features}>\n      <div className=\"container\">\n      <div className={clsx('col col--12')}>\n          <h2 className={styles.title}>FAQs</h2>\n          <p>For anything not covered here, join <a className={styles.href} href=\"https://discord.gg/CJWmYpyFbc\" target=\"_blank\">our Discord</a></p>\n        </div>\n        <div className={clsx('col col--12 text--left padding-horiz--md')}>\n        <Accordion allowZeroExpanded>\n          {items.map((item, idx) => (\n              <AccordionItem key={idx}>\n                  <AccordionItemHeading>\n                      <AccordionItemButton>\n                          {item.heading}\n                      </AccordionItemButton>\n                  </AccordionItemHeading>\n                  <AccordionItemPanel>\n                  <p className={styles.faqAnswer} dangerouslySetInnerHTML={{__html: item.content}} />\n                  </AccordionItemPanel>\n              </AccordionItem>\n          ))}\n      </Accordion>\n        </div>\n      </div>\n    </section>\n  );\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageFAQ/styles.module.css",
    "content": ".features {\n  display: flex;\n  align-items: center;\n  padding: 2rem 0 8rem 0;\n  width: 100%;\n  background-color: #076d9e;\n}\n\n.featureSvg {\n  height: 200px;\n  width: 200px;\n}\n.title {\n  font-size: 3rem;\n  text-align: center;\n  color: #fff;\n}\n\n.title + p {\n  font-size: 1.5rem;\n  font-style: italic;\n  text-align: center;\n  margin-bottom: 5rem;\n  color: #fff;\n}\n.subTitle {\n  font-size: 1.5rem;\n  text-align: center;\n  color: #fff;\n}\n.description {\n  font-size: 1rem;\n  text-align: center;\n  margin-top: 3rem;\n  color: #fff;\n}\n.rowWitExtraMargin {\n  margin-top: 80px;\n}\n.link {\n  color: var(--ifm-hero-text-color);\n  text-decoration: underline;\n  transition: color var(--ifm-transition-fast)\n    var(--ifm-transition-timing-default);\n}\n\n.wrapper {\n  position: relative;\n}\n\n.verticalAndHorizontalCenter {\n  margin: 0;\n  position: absolute;\n  top: 50%;\n  left: 50%;\n  -ms-transform: translate(-50%, -50%);\n  transform: translate(-50%, -50%);\n}\n\n.href {\n  text-decoration: underline;\n}\n.faqAnswer a {\n  text-decoration: underline;\n}\n\n/** Mobile view */\n@media screen and (max-width: 996px) {\n  .title {\n    font-size: 2rem;\n  }\n  .title + p {\n    margin-bottom: 1rem;\n  }\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageFastkafkaChat/index.js",
    "content": "import React from 'react';\nimport clsx from 'clsx';\n\nimport styles from './styles.module.css';\n\n\n\n// const FeatureList = [\n//   {\n//     title: 'WRITE',\n//     Svg: require('@site/static/img/programming-monitor-svgrepo-com.svg').default,\n//     description: (\n//       <>\n//         producers & consumers for Kafka topics in a simplified way\n//       </>\n//     ),\n//   },\n//   {\n//     title: 'PROTOTYPE',\n//     Svg: require('@site/static/img/rocket-svgrepo-com.svg').default,\n//     description: (\n//       <>\n//         quickly & develop high-performance Kafka-based services\n//       </>\n//     ),\n//   },\n//   {\n//     title: 'STREAMLINE',\n//     Svg: require('@site/static/img/hierarchy-order-svgrepo-com.svg').default,\n//     description: (\n//       <>\n//         your workflow & accelerate your progress\n//       </>\n//     ),\n//   },\n// ];\n\nfunction Feature({Svg, title, description}) {\n  return (\n    <div className={clsx('col col--4')}>\n      <div className=\"text--center\">\n        <Svg className={styles.featureSvg} role=\"img\" />\n      </div>\n      <div className=\"text--center padding-horiz--md\">\n        <h3>{title}</h3>\n        <p>{description}</p>\n      </div>\n    </div>\n  );\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageFastkafkaChat/styles.module.css",
    "content": ".features {\n  display: flex;\n  align-items: center;\n  padding: 5rem 0;\n  width: 100%;\n  background: rgb(82, 175, 216);\n  background: linear-gradient(\n    180deg,\n    rgba(82, 175, 216, 1) 0%,\n    rgba(96, 190, 228, 1) 100%\n  );\n  border-top: 1px solid #8bcae5;\n  border-bottom: 1px solid #8bcae5;\n}\n\n.featureSvg {\n  height: 160px;\n  width: 160px;\n  margin: 30px 0px;\n}\n.title {\n  font-size: 3rem;\n  text-align: center;\n  color: #fff;\n}\n.fastkafkaDescription {\n  font-size: 1.5rem;\n  font-style: italic;\n  text-align: center;\n  margin-bottom: 5rem;\n  color: #fff;\n}\n.subTitle {\n  font-size: 1.5rem;\n  text-align: left;\n  color: #fff;\n}\n.description {\n  font-size: 1rem;\n  text-align: center;\n  margin-top: 3rem;\n  color: #fff;\n}\n.rowWitExtraMargin {\n  margin-top: 80px;\n}\n\n.fastkafkaChatIframe {\n  width: 100%;\n  height: 600px;\n  display: inline-block;\n  position: relative;\n}\n.fastkafkaChatHeader {\n  font-size: 1.8rem;\n  text-align: center;\n}\n\n/** Mobile view */\n@media screen and (max-width: 996px) {\n  .title {\n    font-size: 2rem;\n  }\n}\n\n/* .slantedDiv {\n  position: relative;\n  padding: 200px 0;\n  background: #fff;\n  overflow: visible;\n  z-index: 1;\n}\n\n.slantedDiv:before,\n.slantedDiv:after {\n  content: \"\";\n  width: 100%;\n  height: 100%;\n  position: absolute;\n  background: inherit;\n  z-index: -1;\n  top: 0;\n  transform-origin: left top;\n  transform: skewY(-2deg);\n}\n\n.slantedDiv:after {\n  bottom: 0;\n  transform-origin: left bottom;\n  transform: skewY(3deg);\n} */\n\n/* displays the content inside, as these settings in the parent breaks the effect */\n/* .slantedDiv div {\n  text-align: center;\n  font-size: 1.5em;\n  line-height: 1.5;\n} */\n"
  },
  {
    "path": "docusaurus/src/components/HomepageFeatures/index.js",
    "content": "import React from 'react';\nimport clsx from 'clsx';\n\nimport styles from './styles.module.css';\n\n\n\nconst FeatureList = [\n  {\n    title: 'WRITE',\n    src: \"img/write.svg\",\n    description: (\n      <>\n        producers & consumers for Kafka topics in a simplified way\n      </>\n    ),\n  },\n  {\n    title: 'PROTOTYPE',\n    src: \"img/prototype.svg\",\n    description: (\n      <>\n        quickly & develop high-performance Kafka-based services\n      </>\n    ),\n  },\n  {\n    title: 'STREAMLINE',\n    src: \"img/streamline.svg\",\n    description: (\n      <>\n        your workflow & accelerate your progress\n      </>\n    ),\n  },\n];\n\nfunction Feature({src, title, description}) {\n  return (\n    <div className={clsx('col col--4')}>\n      <div className=\"text--center\">\n        <img className={styles.featureSvg} src={src}/>\n      </div>\n      <div className={clsx(\"text--center padding-horiz--md\"), styles.textContainer}>\n        <h3>{title}</h3>\n        <p>{description}</p>\n      </div>\n    </div>\n  );\n}\n\nexport default function HomepageFeatures() {\n  return (\n    <section className={styles.features}>\n      <div className=\"container\">\n      <div className={clsx('col col--12')}>\n          <h2 className={styles.title}>Swim with the stream…ing services</h2>\n        </div>\n        <div className=\"row\">\n          {FeatureList.map((props, idx) => (\n            <Feature key={idx} {...props} />\n          ))}\n        </div>\n      </div>\n    </section>\n  );\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageFeatures/styles.module.css",
    "content": ".features {\n  display: flex;\n  align-items: center;\n  padding: 1rem 0 4rem 0;\n  width: 100%;\n  background: rgb(20, 116, 166);\n  background: linear-gradient(\n    180deg,\n    rgba(20, 116, 166, 1) 50%,\n    rgba(82, 175, 216, 1) 100%\n  );\n}\n\n.featureSvg {\n  height: 250px;\n  width: 330px;\n  margin: 30px 0px;\n}\n.title {\n  font-size: 3rem;\n  text-align: center;\n  padding-top: 2rem;\n  padding-bottom: 3rem;\n  color: #fff;\n}\n.textContainer {\n  color: #fff;\n  text-align: center;\n  padding: 1rem 2.2rem;\n}\n.textContainer h3 {\n  font-size: 1.5rem;\n}\n.textContainer p {\n  font-size: 1rem;\n}\n.subTitle {\n  font-size: 1.4rem;\n  text-align: left;\n  color: #fff;\n}\n.description {\n  font-size: 1rem;\n  text-align: center;\n  margin-top: 3rem;\n  color: #fff;\n}\n.rowWitExtraMargin {\n  margin-top: 80px;\n}\n\n/** Mobile view */\n@media screen and (max-width: 996px) {\n  .title {\n    font-size: 2rem;\n  }\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageWhatYouGet/index.js",
    "content": "import React from 'react';\nimport clsx from 'clsx';\nimport Link from '@docusaurus/Link';\n\nimport styles from './styles.module.css';\n\nexport default function HomepageWhatYouGet() {\n  return (\n    <section className={styles.features}>\n      <div className=\"container\">\n      <div className={clsx('col col--12')}>\n          <h2 className={styles.title}>You get what you expect</h2>\n        </div>\n        <div className={`row ${styles.childrenWithExtraPadding}`}>\n          <div className={clsx('col col--6 text--center padding-horiz--md')}>\n            <p>Function decorators with type hints specifying Pydantic classes for JSON encoding/decoding, automatic message routing and documentation generation.</p>\n          </div>\n          <div className={clsx('col col--6 text--center padding-horiz--md')}>\n            <p>Built on top of <a className={styles.link} href=\"https://docs.pydantic.dev/\" target=\"_blank\">Pydantic</a>, <a className={styles.link} href=\"https://github.com/aio-libs/aiokafka/\" target=\"_blank\">AIOKafka</a> and <a className={styles.link} href=\"https://www.asyncapi.com/\" target=\"_blank\">AsyncAPI</a>, FastKafka simplifies the process of writing producers and consumers for Kafka topics, handling all the parsing, networking, task scheduling and data generation automatically. 
</p>\n          </div>\n        </div>\n        {/* <div className={`${styles.rowWitExtraMargin} row`}>\n          <div className={clsx('col col--6', styles.wrapper)}>\n            <div className={`text--center padding-horiz--md ${styles.verticalAndHorizontalCenter}`}>\n            <Link\n              className=\"btn-github-link button button--secondary button--lg\"\n              to=\"https://github.com/airtai/fastkafka\">\n                Check it out\n            </Link>\n            </div>\n          </div>\n          <div className={clsx('col col--6')}>\n            <div className=\"text--center padding-horiz--md\">\n              <img src=\"img/docusaurus-plushie-banner.jpeg\" />\n            </div>\n          </div>\n        </div> */}\n      </div>\n    </section>\n  );\n}\n"
  },
  {
    "path": "docusaurus/src/components/HomepageWhatYouGet/styles.module.css",
    "content": ".features {\n  display: flex;\n  align-items: center;\n  padding: 4rem 0 0 0;\n  width: 100%;\n  background-color: #60bee4;\n  border-bottom: 1px solid #8bcae5;\n}\n\n.features p {\n  text-align: left;\n  font-size: 1rem;\n}\n\n.featureSvg {\n  height: 200px;\n  width: 200px;\n}\n.title {\n  font-size: 3rem;\n  text-align: center;\n  color: #fff;\n}\n.subTitle {\n  font-size: 1.5rem;\n  text-align: left;\n  color: #fff;\n}\n\n.rowWitExtraMargin {\n  margin-top: 80px;\n}\n.link {\n  color: #fff;\n  text-decoration: underline;\n  transition: color var(--ifm-transition-fast)\n    var(--ifm-transition-timing-default);\n}\n.link:hover {\n  color: var(--ifm-hero-text-color);\n}\n\n.wrapper {\n  position: relative;\n}\n\n.verticalAndHorizontalCenter {\n  margin: 0;\n  position: absolute;\n  top: 50%;\n  left: 50%;\n  -ms-transform: translate(-50%, -50%);\n  transform: translate(-50%, -50%);\n}\n.childrenWithExtraPadding {\n  margin: 4rem 0px;\n  /* padding: 0px 30px; */\n  font-size: 1.2rem;\n  color: #fff;\n}\n/** Mobile view */\n@media screen and (max-width: 996px) {\n  .title {\n    font-size: 2rem;\n  }\n}\n"
  },
  {
    "path": "docusaurus/src/components/RobotFooterIcon/index.js",
    "content": "import React from 'react';\nimport clsx from 'clsx';\n\nimport styles from './styles.module.css';\n\nexport default function RobotFooterIcon() {\n  return (\n    <section>\n      <div className={clsx(\"container\", styles.robotFooterContainer)}>\n       <img className={styles.robotFooterIcon} src=\"img/robot-footer.svg\" />\n      </div>\n    </section>\n  );\n}\n"
  },
  {
    "path": "docusaurus/src/components/RobotFooterIcon/styles.module.css",
    "content": ".robotFooterContainer {\n  text-align: center;\n  position: relative;\n}\n\n.robotFooterIcon {\n  width: 7rem;\n  height: auto;\n  position: absolute;\n  margin-top: -4rem;\n  margin-left: -3.5rem;\n}\n"
  },
  {
    "path": "docusaurus/src/css/custom.css",
    "content": "/**\n * Any CSS included here will be global. The classic template\n * bundles Infima by default. Infima is a CSS framework designed to\n * work well for content-centric websites.\n */\n\n/* You can override the default Infima variables here. */\n\n@font-face {\n  font-family: \"Panton-SemiBold\";\n  src: url(\"/static/font/Panton-SemiBold.woff\") format(\"woff\");\n}\n\n@font-face {\n  font-family: \"Rubik-Medium\";\n  src: url(\"/static/font/Rubik-Medium.ttf\") format(\"truetype\");\n}\n\n@font-face {\n  font-family: \"RobotoMono-Regular\";\n  src: url(\"/static/font/RobotoMono-Regular.ttf\") format(\"truetype\");\n}\n\n@font-face {\n  font-family: \"Roboto-Light\";\n  src: url(\"/static/font/Roboto-Light.ttf\") format(\"truetype\");\n}\n\n@font-face {\n  font-family: \"Roboto-Regular\";\n  src: url(\"/static/font/Roboto-Regular.ttf\") format(\"truetype\");\n}\n\n:root {\n  font-family: \"Roboto-Regular\";\n  --ifm-font-family-monospace: \"RobotoMono-Regular\";\n  --ifm-font-family-base: \"Roboto-Regular\";\n  --ifm-heading-font-family: \"Rubik-Medium\";\n  --ifm-color-primary: #56b7e1; /*#2e8555; */\n  --ifm-color-primary-dark: #3cacdc;\n  --ifm-color-primary-darker: #2ea6da;\n  --ifm-color-primary-darkest: #218bb9;\n  --ifm-color-primary-light: #70c2e6;\n  --ifm-color-primary-lighter: #7ec8e8;\n  --ifm-color-primary-lightest: #a5d9ef;\n  --ifm-code-font-size: 95%;\n  --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);\n  --ifm-navbar-background-color: #003257;\n  --ifm-dropdown-background-color: #003257;\n  --ifm-navbar-height: 4.69rem;\n  /* --ifm-font-color-base: #fff; */\n}\n\n/* For readability concerns, you should choose a lighter palette in dark mode. 
*/\n[data-theme=\"dark\"] {\n  --ifm-color-primary: #56b7e1;\n  --ifm-color-primary-dark: #3cacdc;\n  --ifm-color-primary-darker: #2ea6da;\n  --ifm-color-primary-darkest: #218bb9;\n  --ifm-color-primary-light: #70c2e6;\n  --ifm-color-primary-lighter: #7ec8e8;\n  --ifm-color-primary-lightest: #a5d9ef;\n  --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);\n  /* --ifm-navbar-background-color: #242526; */\n}\n\nhtml[data-theme=\"dark\"] .DocSearch-Button {\n  background: #ebedf0;\n  color: #969faf;\n}\n\nhtml[data-theme=\"dark\"] .DocSearch-Button:hover {\n  background: #fff;\n  box-shadow: inset 0 0 0 2px var(--docsearch-primary-color);\n  color: #1c1e21;\n}\n\nhtml[data-theme=\"dark\"] .DocSearch-Button .DocSearch-Search-Icon {\n  color: #1c1e21;\n}\n\nhtml[data-theme=\"dark\"] .DocSearch-Button .DocSearch-Button-Key {\n  background: linear-gradient(-225deg, #d5dbe4, #f8f8f8);\n  color: #969faf;\n  box-shadow: inset 0 -2px 0 0 #cdcde6, inset 0 0 1px 1px #fff,\n    0 1px 2px 1px rgba(30, 35, 90, 0.4);\n}\n\n/* default settings for both tablet and desktop */\n.navbar.navbar--fixed-top\n  .navbar__items\n  > a.fastkafka-home-mobile\n  + div\n  > button,\n.navbar-sidebar .navbar-sidebar__brand div > button {\n  color: #fff;\n}\n.navbar.navbar--fixed-top\n  .navbar__items\n  > a.fastkafka-home-mobile\n  + div\n  > button:hover,\n.navbar-sidebar .navbar-sidebar__brand div > button:hover {\n  background: #8c9fae;\n}\n\n[data-theme=\"dark\"]\n  .navbar.navbar--fixed-top\n  .navbar__items\n  > a.fastkafka-home-mobile\n  + div\n  > button:hover,\n.navbar-sidebar .navbar-sidebar__brand div > button:hover {\n  background: #444950;\n}\n\n.navbar-sidebar .navbar-sidebar__items .navbar-sidebar__item > button,\n.navbar-sidebar .navbar-sidebar__items .navbar-sidebar__item > ul > li a {\n  color: #fff;\n}\n\n.menu__list-item--collapsed .menu__link--sublist-caret:after {\n  background: url(\"/static/img/icon-arrow-right-blue.svg\") 50% / 2rem 2rem;\n  min-width: 1rem;\n  width: 
1rem;\n  height: 1rem;\n  transform: rotateZ(0deg);\n  filter: none;\n}\n.menu__link--sublist-caret:after {\n  background: url(\"/static/img/icon-arrow-right-blue.svg\") 50% / 2rem 2rem;\n  transform: rotate(90deg);\n  min-width: 1rem;\n  width: 1rem;\n  height: 1rem;\n  filter: none;\n}\n.navbar-sidebar__back,\n.menu__link--active:not(.menu__link--sublist) {\n  background: rgba(255, 255, 255, 0.05);\n}\n/* + div[class^=\"toggle_\"] { */\nhtml.plugin-pages .navbar__items.navbar__items--right > a + div > button {\n  display: none;\n}\n\n.navbar.navbar-sidebar--show .navbar-sidebar .navbar-sidebar__brand > div {\n  margin-left: auto;\n  margin-right: 1rem !important;\n}\n\n.navbar.navbar-sidebar--show\n  .navbar-sidebar\n  .navbar-sidebar__brand\n  .navbar-sidebar__close {\n  margin-left: unset;\n}\n\nhtml.plugin-pages\n  .navbar.navbar-sidebar--show\n  .navbar-sidebar\n  .navbar-sidebar__brand\n  > div {\n  display: none;\n}\n\nhtml.plugin-pages\n  .navbar.navbar-sidebar--show\n  .navbar-sidebar\n  .navbar-sidebar__brand\n  .navbar-sidebar__close {\n  margin-left: auto;\n}\n\n.navbar--fixed-top {\n  padding-top: 0px;\n  padding-bottom: 0px;\n}\n.navbar__title {\n  color: #fff;\n  font-size: 3.5rem;\n  font-family: var(--ifm-heading-font-family);\n  font-weight: 100;\n  line-height: var(--ifm-heading-line-height);\n  margin-left: -0.4rem;\n}\n\n.navbar__brand:hover {\n  color: var(--ifm-navbar-link-color);\n}\n\n.navbar__items.navbar__items--right .navbar__link {\n  color: #fff;\n}\n.navbar__items.navbar__items--right .navbar__item.dropdown .dropdown__link {\n  color: #fff;\n}\n.navbar__items.navbar__items--right\n  .navbar__item.dropdown\n  .dropdown__link:hover {\n  color: var(--ifm-link-hover-color);\n}\n.navbar__items.navbar__items--right .navbar__item.dropdown,\n.navbar__items.navbar__items--right .navbar__item.navbar__link {\n  border-right: 1px solid #214c6c;\n  padding: 23px 18px;\n}\n.navbar__items.navbar__items--right\n  
.navbar__item.navbar__link.header-discord-link {\n  border-right: none;\n}\n\n.dropdown > .navbar__link:after {\n  margin-left: 0.5em;\n  top: 1px;\n  font-size: 0.8rem;\n}\n\n.navbar__logo {\n  width: 2.156rem;\n  height: auto;\n  margin-top: 1.2rem;\n}\n\n.navbar__brand {\n  margin-left: 0;\n  margin-right: 0;\n}\n\nhtml.docs-doc-page main article .markdown > h2.anchor {\n  font-weight: 400;\n}\nhtml.docs-doc-page main article .markdown > h3.anchor {\n  font-weight: 500;\n  font-family: var(--ifm-font-family-monospace);\n}\nhtml.docs-doc-page main article .markdown > h3.anchor > strong,\nhtml.docs-doc-page main ul.table-of-contents > li > ul a > strong {\n  font-weight: 500;\n}\n\nhtml.docs-doc-page main article .markdown table {\n    text-align: left;\n    font-size: 0.8rem;\n}\n\nhtml.docs-doc-page main article .markdown table code{\n    border: 1px solid rgba(0, 0, 0, 0.1);\n}\n\nnav .navbar__inner .navbar__items .fastkafka-home > div {\n  margin-top: 1px;\n}\nnav .navbar__inner .navbar__items .fastkafka-home > div > p {\n  display: inline-block;\n  margin: 0;\n  color: #fff;\n  font-size: 1.2rem;\n  font-family: var(--ifm-heading-font-family);\n  font-weight: 100;\n  line-height: var(--ifm-heading-line-height);\n}\nnav .navbar__inner .navbar__items .fastkafka-home > div > img {\n  width: 15px;\n  height: auto;\n  margin-right: 5px;\n}\n.fastkafka-home-mobile {\n  display: none;\n}\n\n.navbar.navbar--fixed-top .navbar__items--right {\n  justify-content: end;\n}\n@media screen and (max-width: 996px) {\n  .navbar-sidebar .navbar-sidebar__items .menu__link.fastkafka-home {\n    display: none;\n  }\n}\n\n.navbar__link {\n  font-size: 1rem;\n}\n.navbar__toggle {\n  color: #fff;\n}\n@media screen and (max-width: 1024px) {\n  .navbar__toggle {\n    margin-top: 0.4rem;\n  }\n}\n\n.header-discord-link:before,\n.footer-discord-link:before {\n  content: \"\";\n  display: flex;\n  height: 24px;\n  width: 24px;\n  background-color: #8c9fae;\n  -webkit-mask-image: 
url(\"/img/icon-discord.svg\");\n  mask-image: url(\"/img/icon-discord.svg\");\n}\n\n.header-discord-link:hover,\n.footer-discord-link:hover {\n  opacity: 0.6;\n}\n\n.footer-github-link:before {\n  content: \"\";\n  display: flex;\n  height: 24px;\n  width: 24px;\n  background-color: #8c9fae;\n  -webkit-mask-image: url(\"/img/icon-github.svg\");\n  mask-image: url(\"/img/icon-github.svg\");\n}\n\n.footer-github-link:hover {\n  opacity: 0.6;\n}\n\n.footer-facebook-link:hover {\n  opacity: 0.6;\n}\n\n.footer-facebook-link:before {\n  content: \"\";\n  display: flex;\n  height: 24px;\n  width: 24px;\n  background-color: #8c9fae;\n  -webkit-mask-image: url(\"/img/icon-facebook.svg\");\n  mask-image: url(\"/img/icon-facebook.svg\");\n}\n\n.footer-twitter-link:hover {\n  opacity: 0.6;\n}\n\n.footer-twitter-link:before {\n  content: \"\";\n  display: flex;\n  height: 24px;\n  width: 24px;\n  background-color: #8c9fae;\n  -webkit-mask-image: url(\"/img/icon-twitter.svg\");\n  mask-image: url(\"/img/icon-twitter.svg\");\n}\n\n.footer-linkedin-link:hover {\n  opacity: 0.6;\n}\n\n.footer-linkedin-link:before {\n  content: \"\";\n  display: flex;\n  height: 24px;\n  width: 24px;\n  background-color: #8c9fae;\n  -webkit-mask-image: url(\"/img/icon-linkedin.svg\");\n  mask-image: url(\"/img/icon-linkedin.svg\");\n}\n\n.github-stars {\n  display: flex;\n  height: 40px;\n  width: 150px;\n  margin-left: 12px;\n}\n\nh3.anchor > code {\n  font-size: 1rem;\n}\n\n.footer.footer--dark {\n  background-color: #003257;\n  /* height: 18.75rem; */\n}\n.footer.footer--dark .container.container-fluid .footer__bottom {\n  position: absolute;\n  padding: 2rem;\n  width: 100%;\n  background-color: #003a60;\n  left: 0;\n}\n.footer.footer--dark .footer__copyright {\n  opacity: 0.5;\n  font-size: 0.85rem;\n  letter-spacing: 0.025rem;\n}\n.footer.footer--dark .footer__col {\n  margin: 2.3rem auto 5rem;\n  height: 10rem;\n  border-left: 2px solid rgb(33, 76, 108);\n  padding-left: 
1.5rem;\n}\n.footer.footer--dark .footer__col .footer__title {\n  letter-spacing: 0.025rem;\n  font-size: 1rem;\n}\n.footer.footer--dark .footer__col:first-child .footer__item {\n  display: inline-block;\n  padding: 0.3rem 0.3rem 0.3rem 0.3rem;\n}\n.footer.footer--dark .footer__col:first-child .footer__item:first-child {\n  padding-left: 0;\n}\n.footer.footer--dark .footer__col .footer__link-item {\n  text-decoration: underline;\n  font-size: 0.9rem;\n}\n\na.link-to-source {\n    margin: 0 0 1rem 0;\n    display: inline-block;\n}\n\na.link-to-source::after {\n    content: \"\";\n  display: flex;\n  height: 24px;\n  width: 24px;\n  background-color: var(--ifm-link-color);\n  -webkit-mask-image: url(\"data:image/svg+xml,%3Csvg width='24' height='24' viewBox='0 0 24 24' stroke-width='1.5' fill='none' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M21 3L15 3M21 3L12 12M21 3V9' stroke='currentColor' stroke-linecap='round' stroke-linejoin='round'/%3E%3Cpath d='M21 13V19C21 20.1046 20.1046 21 19 21H5C3.89543 21 3 20.1046 3 19V5C3 3.89543 3.89543 3 5 3H11' stroke='currentColor' stroke-linecap='round'/%3E%3C/svg%3E%0A\");\n  mask-image: url(\"data:image/svg+xml,%3Csvg width='24' height='24' viewBox='0 0 24 24' stroke-width='1.5' fill='none' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M21 3L15 3M21 3L12 12M21 3V9' stroke='currentColor' stroke-linecap='round' stroke-linejoin='round'/%3E%3Cpath d='M21 13V19C21 20.1046 20.1046 21 19 21H5C3.89543 21 3 20.1046 3 19V5C3 3.89543 3.89543 3 5 3H11' stroke='currentColor' stroke-linecap='round'/%3E%3C/svg%3E%0A\");\n  display: inline-block;\n  position: absolute;\n    margin-left: 0rem;\n    margin-top: 0.02rem;\n    transform: scale(0.67);\n}\n\n.footer.footer--dark .footer__col .footer__link-item::after {\n  content: \"\";\n  display: flex;\n  height: 35px;\n  width: 42px;\n  background-color: #8c9fae;\n  -webkit-mask-image: 
url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iNDgiIHZpZXdCb3g9IjAgOTYgOTYwIDk2MCIgd2lkdGg9IjQ4Ij48cGF0aCBkPSJNNTQwIDc5M3EtOS05LTktMjEuNXQ4LTIwLjVsMTQ3LTE0N0gxOTBxLTEzIDAtMjEuNS04LjVUMTYwIDU3NHEwLTEzIDguNS0yMS41VDE5MCA1NDRoNDk2TDUzOCAzOTZxLTktOS04LjUtMjF0OS41LTIxcTktOCAyMS41LTh0MjAuNSA4bDE5OSAxOTlxNSA1IDcgMTB0MiAxMXEwIDYtMiAxMXQtNyAxMEw1ODIgNzkzcS05IDktMjEgOXQtMjEtOVoiLz48L3N2Zz4=);\n  mask-image: url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iNDgiIHZpZXdCb3g9IjAgOTYgOTYwIDk2MCIgd2lkdGg9IjQ4Ij48cGF0aCBkPSJNNTQwIDc5M3EtOS05LTktMjEuNXQ4LTIwLjVsMTQ3LTE0N0gxOTBxLTEzIDAtMjEuNS04LjVUMTYwIDU3NHEwLTEzIDguNS0yMS41VDE5MCA1NDRoNDk2TDUzOCAzOTZxLTktOS04LjUtMjF0OS41LTIxcTktOCAyMS41LTh0MjAuNSA4bDE5OSAxOTlxNSA1IDcgMTB0MiAxMXEwIDYtMiAxMXQtNyAxMEw1ODIgNzkzcS05IDktMjEgOXQtMjEtOVoiLz48L3N2Zz4=);\n  display: inline-block;\n  transform: scale(0.4);\n  position: absolute;\n  margin-left: -0.5rem;\n  margin-top: -0.4rem;\n}\n.footer.footer--dark .footer__col:last-child .footer__link-item::after {\n  display: none;\n}\n.footer.footer--dark .footer__col .footer__link-item > svg {\n  display: none;\n}\n\n/**\n* ----------------------------------------------\n* FAQ accordion styles starts\n* ----------------------------------------------\n**/\n.container .accordion {\n  border: 1px solid rgba(139, 202, 229, 0.1);\n  border-radius: 2px;\n}\n\n.accordion__item + .accordion__item {\n  border-top: 1px solid rgba(139, 202, 229, 0.1);\n}\n\n.accordion__item .accordion__button {\n  background-color: #076d9e;\n  /* color: var(--ifm-font-color-base); */\n  color: #fff;\n  cursor: pointer;\n  padding: 2rem;\n  width: 100%;\n  text-align: left;\n  border: 1px solid #8bcae5;\n  font-size: 1rem;\n  margin: 0.5rem 0;\n}\n\n.accordion__item .accordion__button:hover {\n  background-color: #60bee4;\n}\n\n.accordion__button:before {\n  display: inline-block;\n  content: \"\";\n  height: 10px;\n  width: 10px;\n  
margin-right: 12px;\n  border-bottom: 2px solid currentColor;\n  border-right: 2px solid currentColor;\n  transform: rotate(-45deg);\n}\n\n.accordion__button[aria-expanded=\"true\"]::before,\n.accordion__button[aria-selected=\"true\"]::before {\n  transform: rotate(45deg);\n}\n\n[hidden] {\n  display: none;\n}\n\n.accordion__item .accordion__panel {\n  padding: 2rem 2rem 1rem 2rem;\n  animation: fadein 0.35s ease-in;\n  color: #fff;\n  font-size: 1rem;\n  /* border: 1px solid #8bcae5; */\n  /* border-top: none; */\n}\n\n/* -------------------------------------------------- */\n/* ---------------- Animation part ------------------ */\n/* -------------------------------------------------- */\n\n@keyframes fadein {\n  0% {\n    opacity: 0;\n  }\n\n  100% {\n    opacity: 1;\n  }\n}\n\n/** Mobile view */\n@media screen and (max-width: 996px) {\n  .accordion__item .accordion__button {\n    font-size: 1.1rem;\n    padding: 1rem;\n  }\n  .accordion__item .accordion__panel {\n    font-size: 1.1rem;\n    padding: 1rem 1rem 0.3rem 1rem;\n  }\n  .footer.footer--dark .footer__col {\n    margin: 1rem auto 1rem;\n    height: auto;\n    border: none;\n  }\n}\n\n/**\n* ----------------------------------------------\n* FAQ accordion ends\n* ----------------------------------------------\n**/\n\n.prism-code.language-py code .token.decorator {\n  color: #c5221f !important;\n}\n\nhtml.docs-doc-page[data-theme=\"dark\"]\n  .prism-code.language-py\n  code\n  .token.decorator {\n  color: #fbc02d !important;\n}\n\n/** Tablet view */\n@media screen and (max-width: 1290px) {\n  .navbar__items.navbar__items--right .navbar__item.dropdown,\n  .navbar__items.navbar__items--right .navbar__item.navbar__link {\n    padding: 23px 18px;\n  }\n  .navbar__title {\n    font-size: 2.8rem;\n    /* margin-left: 1.8rem; */\n    margin-top: -0.3rem;\n    padding: 8px 1rem 8px 0px;\n  }\n  .navbar__logo {\n    margin-top: 0.5rem;\n    width: 1.7rem;\n  }\n  .navbar__brand {\n    margin-left: 0;\n    
margin-top: 0.4rem;\n  }\n}\n\n/** Tablet view */\n@media screen and (max-width: 996px) {\n  .navbar__item.github-stars {\n    display: none;\n  }\n  ul.menu__list li {\n    margin-bottom: 10px;\n  }\n  .navbar__items.navbar__items--right\n    .navbar__item.navbar__link.fastkafka-home-mobile {\n    display: block;\n    padding: 0px;\n    margin-right: 11rem;\n    border-right: none;\n    margin-top: 7px;\n  }\n  .navbar__items.navbar__items--right\n    .navbar__item.navbar__link.fastkafka-home-mobile\n    img {\n    width: 30px;\n    height: auto;\n  }\n}\n\n/** Mobile view */\n@media screen and (max-width: 768px) {\n  .navbar__items.navbar__items--right\n    .navbar__item.navbar__link.fastkafka-home-mobile {\n    margin-right: 3.5rem;\n  }\n  .navbar__items.navbar__items--right\n    .navbar__item.navbar__link.fastkafka-home-mobile\n    img {\n    width: 33px;\n  }\n}\n"
  },
  {
    "path": "docusaurus/src/pages/demo/index.js",
    "content": "import React from 'react';\nimport clsx from 'clsx';\nimport Layout from '@theme/Layout';\nimport YouTube from 'react-youtube';\n\nimport styles from './styles.module.css';\n\nconst opts = {\n      height: '720',\n      width: '1280',\n    };\n\nexport default function Hello() {\n  return (\n    <Layout title=\"Demo\" description=\"Demo\">\n      <section className={`hero hero--primary ${styles.containerWithMinHeight}`}>\n      <div className=\"container\">\n        <div className=\"row\">\n          <div className=\"col col--12\">\n            <YouTube videoId=\"dQw4w9WgXcQ\" opts={opts}/>\n          </div>\n        </div>\n      </div>\n    </section>\n    </Layout>\n  );\n}"
  },
  {
    "path": "docusaurus/src/pages/demo/styles.module.css",
    "content": ".features {\n  display: flex;\n  align-items: center;\n  padding: 2rem 0;\n  width: 100%;\n}\n\n.header {\n  font-size: 4rem;\n  text-align: center;\n}\n\n.description {\n  font-size: 1.2rem;\n  margin-top: 1rem;\n}\n\n.containerWithMinHeight {\n  min-height: 500px;\n}"
  },
  {
    "path": "docusaurus/src/pages/index.js",
    "content": "import React from 'react';\nimport clsx from 'clsx';\nimport Link from '@docusaurus/Link';\nimport useDocusaurusContext from '@docusaurus/useDocusaurusContext';\nimport Layout from '@theme/Layout';\nimport HomepageFeatures from '@site/src/components/HomepageFeatures';\nimport HomepageWhatYouGet from '@site/src/components/HomepageWhatYouGet';\nimport HomepageCommunity from '@site/src/components/HomepageCommunity';\nimport HomepageFAQ from '@site/src/components/HomepageFAQ';\nimport RobotFooterIcon from '@site/src/components/RobotFooterIcon';\n\nimport styles from './index.module.css';\n\nfunction HomepageHeader() {\n  return (\n    <header className={clsx('hero hero--primary', styles.heroBanner)}>\n      <div className=\"container\">\n        <img className={styles.heroRobot} src=\"img/robot-hero.svg\" />\n        <p className={styles.description}>Open-source framework for building asynchronous web </p>\n        <p className={styles.description}>services that interact with Kafka</p>\n        <p className={styles.descriptionMobile}>Open-source framework for building asynchronous web services that interact with Kafka</p>\n        <div className={styles.buttons}>\n          <Link\n            className={clsx(\"button button--lg\", styles.heroButton)}\n            to=\"/docs\">\n              Get Started\n          </Link>\n        </div>\n      </div>\n    </header>\n  );\n}\n\nexport default function Home() {\n  const {siteConfig} = useDocusaurusContext();\n  return (\n    <Layout\n      title={siteConfig.tagline}\n      description={siteConfig.customFields.description}>\n      <HomepageHeader />\n      <main>\n        <HomepageFeatures />\n        <HomepageWhatYouGet />\n        <HomepageCommunity />\n        <HomepageFAQ />\n        <RobotFooterIcon />\n      </main>\n    </Layout>\n  );\n}\n"
  },
  {
    "path": "docusaurus/src/pages/index.module.css",
    "content": "/**\n * CSS files with the .module.css suffix will be treated as CSS modules\n * and scoped locally.\n */\n\n.heroBanner {\n  padding: 4rem 0;\n  text-align: center;\n  position: relative;\n  overflow: hidden;\n  background: rgb(96, 190, 228);\n  background: linear-gradient(\n    180deg,\n    rgba(96, 190, 228, 1) 0%,\n    rgba(17, 115, 164, 1) 100%\n  );\n}\n.heroRobot {\n  width: 870px;\n  margin-top: 1rem;\n  margin-bottom: 2rem;\n}\n\n.buttons {\n  display: flex;\n  align-items: center;\n  justify-content: center;\n  margin-top: 55px;\n}\n\n.title {\n  font-size: 3rem;\n  margin-bottom: 60px;\n}\n\n.description {\n  font-size: 1.5rem;\n  line-height: 0.8rem;\n  font-style: italic;\n  color: #fff;\n}\n.heroButton {\n  color: #fff;\n  background: var(--ifm-navbar-background-color);\n  border-radius: 25px;\n  padding: 0.7rem 2.5rem 0.7rem 2.5rem;\n  margin-top: -1.5rem;\n  font-size: 1rem;\n}\n.heroButton:hover {\n  background: #3e99c5;\n}\n.descriptionMobile {\n  display: none;\n}\n\n/** Tablet view */\n@media screen and (max-width: 1290px) {\n}\n\n/** Mobile view */\n@media screen and (max-width: 996px) {\n  .heroBanner {\n    padding: 2rem;\n  }\n  .description {\n    display: none;\n  }\n  .descriptionMobile {\n    font-size: 1.3rem;\n    font-style: italic;\n    line-height: 1.8rem;\n    margin-bottom: 0;\n    display: block;\n  }\n}\n"
  },
  {
    "path": "docusaurus/src/utils/prismDark.mjs",
    "content": "/**\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nimport darkTheme from 'prism-react-renderer/themes/vsDark/index.cjs.js';\n\nexport default {\n  plain: {\n    color: '#D4D4D4',\n    backgroundColor: '#212121',\n  },\n  styles: [\n    ...darkTheme.styles,\n    {\n      types: ['title'],\n      style: {\n        color: '#569CD6',\n        fontWeight: 'bold',\n      },\n    },\n    {\n      types: ['property', 'parameter'],\n      style: {\n        color: '#9CDCFE',\n      },\n    },\n    {\n      types: ['script'],\n      style: {\n        color: '#D4D4D4',\n      },\n    },\n    {\n      types: ['boolean', 'arrow', 'atrule', 'tag'],\n      style: {\n        color: '#569CD6',\n      },\n    },\n    {\n      types: ['number', 'color', 'unit'],\n      style: {\n        color: '#B5CEA8',\n      },\n    },\n    {\n      types: ['font-matter'],\n      style: {\n        color: '#CE9178',\n      },\n    },\n    {\n      types: ['keyword', 'rule'],\n      style: {\n        color: '#C586C0',\n      },\n    },\n    {\n      types: ['regex'],\n      style: {\n        color: '#D16969',\n      },\n    },\n    {\n      types: ['maybe-class-name'],\n      style: {\n        color: '#4EC9B0',\n      },\n    },\n    {\n      types: ['constant'],\n      style: {\n        color: '#4FC1FF',\n      },\n    },\n  ],\n};"
  },
  {
    "path": "docusaurus/src/utils/prismLight.mjs",
    "content": "/**\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nimport lightTheme from 'prism-react-renderer/themes/github/index.cjs.js';\n\nexport default {\n  ...lightTheme,\n  styles: [\n    ...lightTheme.styles,\n    {\n      types: ['title'],\n      style: {\n        color: '#0550AE',\n        fontWeight: 'bold',\n      },\n    },\n    {\n      types: ['parameter'],\n      style: {\n        color: '#953800',\n      },\n    },\n    {\n      types: ['boolean', 'rule', 'color', 'number', 'constant', 'property'],\n      style: {\n        color: '#005CC5',\n      },\n    },\n    {\n      types: ['atrule', 'tag'],\n      style: {\n        color: '#22863A',\n      },\n    },\n    {\n      types: ['script'],\n      style: {\n        color: '#24292E',\n      },\n    },\n    {\n      types: ['operator', 'unit', 'rule'],\n      style: {\n        color: '#D73A49',\n      },\n    },\n    {\n      types: ['font-matter', 'string', 'attr-value'],\n      style: {\n        color: '#C6105F',\n      },\n    },\n    {\n      types: ['class-name'],\n      style: {\n        color: '#116329',\n      },\n    },\n    {\n      types: ['attr-name'],\n      style: {\n        color: '#0550AE',\n      },\n    },\n    {\n      types: ['keyword'],\n      style: {\n        color: '#CF222E',\n      },\n    },\n    {\n      types: ['function'],\n      style: {\n        color: '#8250DF',\n      },\n    },\n    {\n      types: ['selector'],\n      style: {\n        color: '#6F42C1',\n      },\n    },\n    {\n      types: ['variable'],\n      style: {\n        color: '#E36209',\n      },\n    },\n    {\n      types: ['comment'],\n      style: {\n        color: '#6B6B6B',\n      },\n    },\n    {\n      types: ['builtin'],\n      style: {\n        color: '#005CC5',\n      },\n    },\n  ],\n};"
  },
  {
    "path": "docusaurus/static/.nojekyll",
    "content": ""
  },
  {
    "path": "docusaurus/static/CNAME",
    "content": "fastkafka.airt.ai\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/CHANGELOG.md",
    "content": "# Release notes\n\n<!-- do not remove -->\n\n## 0.5.0\n\n### New Features\n\n- Significant speedup of Kafka producer ([#236](https://github.com/airtai/fastkafka/pull/236)), thanks to [@Sternakt](https://github.com/Sternakt)\n \n\n- Added support for AVRO encoding/decoding ([#231](https://github.com/airtai/fastkafka/pull/231)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n### Bugs Squashed\n\n- Fixed sidebar to include guides in docusaurus documentation ([#238](https://github.com/airtai/fastkafka/pull/238)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Fixed link to symbols in docusaurus docs ([#227](https://github.com/airtai/fastkafka/pull/227)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Removed bootstrap servers from constructor ([#220](https://github.com/airtai/fastkafka/pull/220)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n## 0.4.0\n\n### New Features\n\n- Integrate fastkafka chat ([#208](https://github.com/airtai/fastkafka/pull/208)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add benchmarking ([#206](https://github.com/airtai/fastkafka/pull/206)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Enable fast testing without running kafka locally ([#198](https://github.com/airtai/fastkafka/pull/198)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Generate docs using Docusaurus ([#194](https://github.com/airtai/fastkafka/pull/194)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add test cases for LocalRedpandaBroker ([#189](https://github.com/airtai/fastkafka/pull/189)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Reimplement patch and delegates from fastcore ([#188](https://github.com/airtai/fastkafka/pull/188)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Rename existing functions into start and stop and add lifespan handler ([#117](https://github.com/airtai/fastkafka/issues/117))\n 
 - https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios\n\n\n## 0.3.1\n\n-  README.md file updated\n\n\n## 0.3.0\n\n### New Features\n\n- Guide for fastkafka produces using partition key ([#172](https://github.com/airtai/fastkafka/pull/172)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #161\n\n- Add support for Redpanda for testing and deployment ([#181](https://github.com/airtai/fastkafka/pull/181)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Remove bootstrap_servers from __init__ and use the name of broker as an option when running/testing ([#134](https://github.com/airtai/fastkafka/issues/134))\n\n- Add a GH action file to check for broken links in the docs ([#163](https://github.com/airtai/fastkafka/issues/163))\n\n- Optimize requirements for testing and docs ([#151](https://github.com/airtai/fastkafka/issues/151))\n\n- Break requirements into base and optional for testing and dev ([#124](https://github.com/airtai/fastkafka/issues/124))\n  - Minimize base requirements needed just for running the service.\n\n- Add link to example git repo into guide for building docs using actions ([#81](https://github.com/airtai/fastkafka/issues/81))\n\n- Add logging for run_in_background ([#46](https://github.com/airtai/fastkafka/issues/46))\n\n- Implement partition Key mechanism for producers ([#16](https://github.com/airtai/fastkafka/issues/16))\n\n### Bugs Squashed\n\n- Implement checks for npm installation and version ([#176](https://github.com/airtai/fastkafka/pull/176)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #158 by checking if the npx is installed and more verbose error handling\n\n- Fix the helper.py link in CHANGELOG.md ([#165](https://github.com/airtai/fastkafka/issues/165))\n\n- fastkafka docs install_deps fails ([#157](https://github.com/airtai/fastkafka/issues/157))\n  - Unexpected internal error: [Errno 2] No such file or directory: 
'npx'\n\n- Broken links in docs ([#141](https://github.com/airtai/fastkafka/issues/141))\n\n- fastkafka run is not showing up in CLI docs ([#132](https://github.com/airtai/fastkafka/issues/132))\n\n\n## 0.2.3\n\n- Fixed broken links on PyPi index page\n\n\n## 0.2.2\n\n### New Features\n\n- Extract JDK and Kafka installation out of LocalKafkaBroker ([#131](https://github.com/airtai/fastkafka/issues/131))\n\n- PyYAML version relaxed ([#119](https://github.com/airtai/fastkafka/pull/119)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Replace docker based kafka with local ([#68](https://github.com/airtai/fastkafka/issues/68))\n  - [x] replace docker compose with a simple docker run (standard run_jupyter.sh should do)\n  - [x] replace all tests to use LocalKafkaBroker\n  - [x] update documentation\n\n### Bugs Squashed\n\n- Fix broken link for FastKafka docs in index notebook ([#145](https://github.com/airtai/fastkafka/issues/145))\n\n- Fix encoding issues when loading setup.py on windows OS ([#135](https://github.com/airtai/fastkafka/issues/135))\n\n\n## 0.2.0\n\n### New Features\n\n- Replace kafka container with LocalKafkaBroker ([#112](https://github.com/airtai/fastkafka/issues/112))\n  - - [x] Replace kafka container with LocalKafkaBroker in tests\n- [x] Remove kafka container from tests environment\n- [x] Fix failing tests\n\n### Bugs Squashed\n\n- Fix random failing in CI ([#109](https://github.com/airtai/fastkafka/issues/109))\n\n\n## 0.1.3\n\n- version update in __init__.py\n\n\n## 0.1.2\n\n### New Features\n\n\n- Git workflow action for publishing Kafka docs ([#78](https://github.com/airtai/fastkafka/issues/78))\n\n\n### Bugs Squashed\n\n- Include missing requirement ([#110](https://github.com/airtai/fastkafka/issues/110))\n  - [x] Typer is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py) but it is not included in [settings.ini](https://github.com/airtai/fastkafka/blob/main/settings.ini)\n  - 
[x] Add aiohttp which is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py)\n  - [x] Add nbformat which is imported in _components/helpers.py\n  - [x] Add nbconvert which is imported in _components/helpers.py\n\n\n## 0.1.1\n\n\n### Bugs Squashed\n\n- JDK install fails on Python 3.8 ([#106](https://github.com/airtai/fastkafka/issues/106))\n\n\n\n## 0.1.0\n\nInitial release\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/CNAME",
    "content": "fastkafka.airt.ai\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/api/fastkafka/FastKafka.md",
    "content": "## `fastkafka.FastKafka` {#fastkafka.FastKafka}\n\n### `__init__` {#init}\n\n`def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Dict[str, Any], root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AbstractAsyncContextManager[NoneType]]] = None, loop=None, client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None`\n\nCreates FastKafka application\n\n**Parameters**:\n- `title`: optional title for the documentation. If None,\nthe title will be set to empty string\n- `description`: optional description for the documentation. 
If\nNone, the description will be set to empty string\n- `version`: optional version for the documentation. If None,\nthe version will be set to empty string\n- `contact`: optional contact for the documentation. If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'\n- `kafka_brokers`: dictionary describing kafka brokers used for\ngenerating documentation\n- `root_path`: path to where documentation will be created\n- `lifespan`: asynccontextmanager that is used for setting lifespan hooks.\n__aenter__ is called before app start and __aexit__ after app stop.\nThe lifespan is called whe application is started as async context\nmanager, e.g.:`async with kafka_app...`\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). 
The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. 
Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. 
For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. 
In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Union[~O, NoneType]]], typing.Callable[[~I], typing.Union[~O, NoneType]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]], typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. 
If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). 
If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. 
Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, 
transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  
It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. 
This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. 
If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the backround.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/api/fastkafka/KafkaEvent.md",
    "content": "## `fastkafka.KafkaEvent` {#fastkafka.KafkaEvent}\n\n\nA generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel\n\n**Parameters**:\n- `message`: The message contained in the Kafka event, can be of type pydantic.BaseModel.\n- `key`: The optional key used to identify the Kafka event.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/api/fastkafka/encoder/avsc_to_pydantic.md",
    "content": "## `fastkafka.encoder.avsc_to_pydantic` {#fastkafka.encoder.avsc_to_pydantic}\n\n### `avsc_to_pydantic` {#avsc_to_pydantic}\n\n`def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass`\n\nGenerate pydantic model from given Avro Schema\n\n**Parameters**:\n- `schema`: Avro schema in dictionary format\n\n**Returns**:\n- Pydantic model class built from given avro schema\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/api/fastkafka/testing/ApacheKafkaBroker.md",
    "content": "## `fastkafka.testing.ApacheKafkaBroker` {#fastkafka.testing.ApacheKafkaBroker}\n\n\nApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nInitialises the ApacheKafkaBroker object\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokers) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.ApacheKafkaBroker) -> str`\n\nStarts a local kafka broker and zookeeper instance synchronously\n\n**Returns**:\n- Kafka broker bootstrap server address in string format: addr:port\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None`\n\nStops a local kafka broker and zookeeper instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/api/fastkafka/testing/LocalRedpandaBroker.md",
    "content": "## `fastkafka.testing.LocalRedpandaBroker` {#fastkafka.testing.LocalRedpandaBroker}\n\n\nLocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None`\n\nInitialises the LocalRedpandaBroker object\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use by Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log levels to use for Redpanda\n\n### `get_service_config_string` {#get_service_config_string}\n\n`def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str`\n\nGenerates a configuration for a service\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `service`: \"redpanda\", defines which service to get config string for\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.LocalRedpandaBroker) -> str`\n\nStarts a local redpanda broker instance synchronously\n\n**Returns**:\n- Redpanda broker bootstrap server address in string format: addr:port\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None`\n\nStops a local redpanda broker instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/api/fastkafka/testing/Tester.md",
    "content": "## `fastkafka.testing.Tester` {#fastkafka.testing.Tester}\n\n### `__init__` {#init}\n\n`def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nMirror-like object for testing a FastKafka application\n\nCan be used as context manager\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokes) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Union[~O, NoneType]]], typing.Callable[[~I], typing.Union[~O, NoneType]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]], typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. 
If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). 
If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. 
Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, 
transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  
It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. 
This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. 
If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the backround.\n\n### `using_local_kafka` {#using_local_kafka}\n\n`def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester`\n\nStarts local Kafka broker used by the Tester instance\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokes) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n**Returns**:\n- An instance of tester with Kafka as broker\n\n### 
`using_local_redpanda` {#using_local_redpanda}\n\n`def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester`\n\nStarts local Redpanda broker used by the Tester instance\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use by Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log levels to use for Redpanda\n\n**Returns**:\n- An instance of tester with Redpanda as broker\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/cli/fastkafka.md",
    "content": "# `fastkafka`\n\n**Usage**:\n\n```console\n$ fastkafka [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `docs`: Commands for managing fastkafka app...\n* `run`: Runs Fast Kafka API application\n* `testing`: Commands for managing fastkafka testing\n\n## `fastkafka docs`\n\nCommands for managing fastkafka app documentation\n\n**Usage**:\n\n```console\n$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `generate`: Generates documentation for a FastKafka...\n* `install_deps`: Installs dependencies for FastKafka...\n* `serve`: Generates and serves documentation for a...\n\n### `fastkafka docs generate`\n\nGenerates documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs generate [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created  [default: .]\n* `--help`: Show this message and exit.\n\n### `fastkafka docs install_deps`\n\nInstalls dependencies for FastKafka documentation generation\n\n**Usage**:\n\n```console\n$ fastkafka docs install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n### `fastkafka docs serve`\n\nGenerates and serves documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs serve [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  
[required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created  [default: .]\n* `--bind TEXT`: Some info  [default: 127.0.0.1]\n* `--port INTEGER`: Some info  [default: 8000]\n* `--help`: Show this message and exit.\n\n## `fastkafka run`\n\nRuns Fast Kafka API application\n\n**Usage**:\n\n```console\n$ fastkafka run [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--num-workers INTEGER`: Number of FastKafka instances to run, defaults to number of CPU cores.  [default: 8]\n* `--kafka-broker TEXT`: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [required]\n* `--help`: Show this message and exit.\n\n## `fastkafka testing`\n\nCommands for managing fastkafka testing\n\n**Usage**:\n\n```console\n$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `install_deps`: Installs dependencies for FastKafka app...\n\n### `fastkafka testing install_deps`\n\nInstalls dependencies for FastKafka app testing\n\n**Usage**:\n\n```console\n$ fastkafka testing install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/cli/run_fastkafka_server_process.md",
    "content": "# `run_fastkafka_server_process`\n\n**Usage**:\n\n```console\n$ run_fastkafka_server_process [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--kafka-broker TEXT`: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [required]\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_00_FastKafka_Demo.md",
    "content": "FastKafka tutorial\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use Python\nlibrary for building asynchronous services that interact with Kafka\ntopics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\n``` python\ntry:\n    import fastkafka\nexcept:\n    ! pip install fastkafka\n```\n\n## Running in Colab\n\nYou can start this interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n</a>\n\n## Writing server code\n\nHere is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic.\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. 
The following call downloads\nthe dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/usage/types/#constrained-types),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. 
It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server.\n\nNext, an object of the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  \"predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstances which internally start Kafka broker and zookeeper.\n\nBefore running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:\n\n``` sh\nfastkafka testing install_deps\n```\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send IrisInputData message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with IrisPrediction in predictions topic\n    await tester.awaited_mocks.on_predictions.assert_awaited_with(\n        IrisPrediction(species=\"setosa\"), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent IrisInputData message to `input_data` topic\n\n4.  
Asserted and checked that the developed iris classification service\n    has reacted to IrisInputData message\n\n## Running the service\n\nThe service can be started using the builtin `fastkafka run` CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": 
{\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nTo run the service, you will need a running Kafka broker on localhost as\nspecified in the `kafka_brokers` parameter above. We can start the Kafka\nbroker locally using the\n[`ApacheKafkaBroker`](../api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker).\nNotice that the same happens automatically in the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\nas shown above.\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: 
Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n    '127.0.0.1:9092'\n\nThen, we start the FastKafka service by running the following command in\nthe folder where the `application.py` file is located:\n\n``` sh\nfastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n```\n\nIn the above command, we use `--num-workers` option to specify how many\nworkers to launch and we use `--kafka-broker` option to specify which\nkafka broker configuration to use from earlier specified `kafka_brokers`\n\n    [1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n    [1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n    [1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n    ^C\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n\nYou need to interupt running of the cell above by selecting\n`Runtime->Interupt execution` on the toolbar above.\n\nFinally, we can stop the local Kafka Broker:\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n  
  [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nWhen running in Colab, we need to update Node.js first:\n\nWe need to install all dependancies for the generator using the\nfollowing command line:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programatically you just need to call the\nfolloving command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n. This will generate the *asyncapi* folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\n    drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    ^C\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation bellow:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    bootstrap_servers=\"localhost:9092\",\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet are for the consumer as specified in\nthe code 
above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippet is for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_01_Intro.md",
    "content": "Intro\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nThis tutorial will show you how to use <b>FastKafkaAPI</b>, step by\nstep.\n\nThe goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel.\n\nIn this Intro tutorial we’ll go trough the basic requirements to run the\ndemos presented in future steps.\n\n## Installing FastKafkaAPI\n\nFirst step is to install FastKafkaAPI\n\n``` shell\n$ pip install fastkafka\n```\n\n## Preparing a Kafka broker\n\nNext step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication.\n\n!!! info \"Hey, your first info!\"\n\n    If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n\nTo go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times).\n\n!!! warning \"Listen! This is important.\"\n\n    To be able to setup this configuration you need to have Docker and docker-compose installed\n\n    See here for more info on <a href = \\\"https://docs.docker.com/\\\" target=\\\"_blank\\\">Docker</a> and <a href = \\\"https://docs.docker.com/compose/install/\\\" target=\\\"_blank\\\">docker compose</a>\n\nTo setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g. fastkafka_demo). 
Inside the new\nfolder create a new YAML file named <b>kafka_demo.yml</b> and copy the\nfollowing configuration into it:\n\n``` yaml\nversion: \"3\"\nservices:\n    zookeeper:\n        image: wurstmeister/zookeeper\n        hostname: zookeeper\n        container_name: zookeeper\n        networks:\n          - fastkafka-network\n        ports:\n          - \"2181:2181\"\n          - \"22:22\"\n          - \"2888:2888\"\n          - \"3888:3888\"\n    kafka:\n        image: wurstmeister/kafka\n        container_name: kafka\n        ports:\n          - \"9093:9093\"\n        environment:\n            HOSTNAME_COMMAND: \"docker info | grep ^Name: | cut -d' ' -f 2\"\n            KAFKA_ZOOKEEPER_CONNECT: \"zookeeper:2181\"\n            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n            KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n            KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n            KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n            KAFKA_CREATE_TOPICS: \"hello:1:1\"\n        volumes:\n            - /var/run/docker.sock:/var/run/docker.sock\n        depends_on:\n            - zookeeper\n        healthcheck:\n            test: [ \"CMD\", \"kafka-topics.sh\", \"--list\", \"--zookeeper\", \"zookeeper:2181\" ]\n            interval: 5s\n            timeout: 10s\n            retries: 5\n        networks:\n          - fastkafka-network\nnetworks:\n    fastkafka-network:\n        name: \"fastkafka-network\"\n```\n\nThis configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a ‘hello’ topic (quite enough for a\nstart). To start the configuration, run:\n\n``` shell\n$ docker-compose -f kafka_demo.yml up -d --wait\n```\n\nThis will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
:confetti_ball:\n\n## Running the code\n\nAfter installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the ‘First Steps’ part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem.\n\nYou are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_02_First_Steps.md",
    "content": "First Steps\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Creating a simple Kafka consumer app\n\nFor our first demo we will create the simplest possible Kafka consumer\nand run it using ‘fastkafka run’ command.\n\nThe consumer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n\n2.  Listen to the hello topic\n\n3.  Write any message received from the hello topic to stdout\n\nTo create the consumer, first, create a file named\n<b>hello_kafka_consumer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n    \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n    print(f\"Got data, msg={msg.msg}\", flush=True)\n```\n\n!!! info \"Kafka configuration\"\n\n    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\n!!! warning \"Remember to flush\"\n\n    Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n\nTo run this consumer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n    [878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n\nNow you can interact with your consumer, by sending the messages to the\nsubscribed ‘hello’ topic, don’t worry, we will cover this in the next\nstep of this guide.\n\n## Sending first message to your consumer\n\nAfter we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly.\n\nIf you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic.\n\nFirst, connect to your running kafka broker by running:\n\n``` shell\ndocker run -it kafka /bin/bash\n```\n\nThen, when connected to the container, run:\n\n``` shell\nkafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n```\n\nThis will open an interactive connection to the hello topic, now you can\nwrite your mesages to the topic and they will be consumed by our\nconsumer.\n\nIn the shell, type:\n\n``` shell\n{\"msg\":\"hello\"}\n```\n\nand press enter. 
This will send a hello message to the topic which will\nbe read by our running consumer and output to stdout.\n\nCheck the output of your consumer (terminal where you ran the ‘fastkafka\nrun’ command) and confirm that your consumer has read the Kafka message.\nYou should see something like this:\n\n``` shell\nGot data, msg=hello\n```\n\n## Creating a hello Kafka producer\n\nConsuming messages is only a part of this Library functionality, the\nother big part is producing the messages. So, let’s create our first\nkafka producer which will send its greetings to our consumer\nperiodically.\n\nThe producer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n2.  Connect to the hello topic\n3.  Periodically send a message to the hello world topic\n\nTo create the producer, first, create a file named\n<b>hello_kafka_producer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n    logger.info(f\"Producing: {msg}\")\n    return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello(HelloKafkaMsg(msg=\"hello\"))\n        await asyncio.sleep(1)\n```\n\n!!! 
info \"Kafka configuration\"\n\n    This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\nTo run this producer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n\nNow, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. If your consumer is still running, you\nshould see the messages appear in its log.\n\n## Recap\n\nIn this guide we have:\n\n1.  Created a simple Kafka consumer using FastKafka\n2.  Sent a message to our consumer through Kafka\n3.  Created a simple Kafka producer using FastKafka\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_03_Authentication.md",
    "content": "Authentication\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## TLS Authentication\n\nsasl_mechanism (str) – Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN\n\nsasl_plain_username (str) – username for SASL PLAIN authentication.\nDefault: None\n\nsasl_plain_password (str) – password for SASL PLAIN authentication.\nDefault: None\n\nsasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_04_Github_Actions_Workflow.md",
    "content": "Deploy FastKafka docs to GitHub Pages\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Getting started\n\nAdd your workflow file `.github/workflows/fastkafka_docs_deploy.yml` and\npush it to your remote default branch.\n\nHere is an example workflow:\n\n``` yaml\nname: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n  push:\n    branches: [ \"main\", \"master\" ]\n  workflow_dispatch:\n\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n      - uses: airtai/workflows/fastkafka-ghp@main\n        with:\n          app: \"test_fastkafka.application:kafka_app\"\n```\n\n## Options\n\n### Set app location\n\nInput in the form of `path:app`, where `path` is the path to a Python\nfile and `app` is an object of type\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka):\n\n``` yaml\n- name: Deploy\n  uses: airtai/workflows/fastkafka-ghp@main\n  with:\n    app: \"test_fastkafka.application:kafka_app\"\n```\n\nIn the above example,\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp is named as `kafka_app` and it is available in the `application`\nsubmodule of the `test_fastkafka` module.\n\n## Example Repository\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\nlibrary that uses the above-mentioned workflow actions to publish\nFastKafka docs to `Github Pages` can be found\n[here](https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_05_Lifespan_Handler.md",
    "content": "Lifespan Events\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nDid you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! :rocket:\n\nLets break it down:\n\nYou can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages.\n\nSimilarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up.\n\nBy executing code before consuming and after producing, you cover the\nentire lifecycle of your application :tada:\n\nThis is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!\n\nSo lets give it a try and see how it can make your Kafka app even more\nawesome! :muscle:\n\n## Lifespan example - Iris prediction model\n\nLet’s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don’t want to load them for every\nmessage.\n\nHere’s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we’ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication.\n\n### Lifespan\n\nYou can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager.\n\nLet’s start with an example and then see it in detail.\n\nWe create an async function lifespan() with yield like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \n```\n\nThe first thing to notice, is that we are defining an async function\nwith `yield`. This is very similar to Dependencies with `yield`.\n\nThe first part of the function, before the `yield`, will be executed\n**before** the application starts. 
And the part after the `yield` will\nbe executed **after** the application has finished.\n\nThis lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown.\n\nThe lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup.\n\nFor demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called.\n\n### Async context manager\n\nContext managers can be used in `with` blocks, our lifespan, for example\ncould be used like this:\n\n``` python\nml_models = {}\nasync with lifespan(None):\n    print(ml_models)\n```\n\nWhen you create a context manager or an async context manager, what it\ndoes is that, before entering the `with` block, it will execute the code\nbefore the `yield`, and after exiting the `with` block, it will execute\nthe code after the `yield`.\n\nIf you want to learn more about context managers and contextlib\ndecorators, please visit [Python official\ndocs](https://docs.python.org/3/library/contextlib.html)\n\n## App demo\n\n### FastKafka app\n\nLets now create our application using the created lifespan handler.\n\nNotice how we passed our lifespan handler to the app when constructing\nit trough the `lifespan` argument.\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n```\n\n### Data modeling\n\nLets model the Iris data for our app:\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, 
description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Consumers and producers\n\nLets create a consumer and producer for our app that will generate\npredictions from input iris data.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Final app\n\nThe final app looks like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal 
width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Running the app\n\nNow we can run the app with your custom lifespan handler. 
Copy the code\nabove in lifespan_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n```\n\nWhen you run the app, you should see a simmilar output to the one below:\n\n    [262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [262292]: Loading the model!\n    [262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from 
{} to {'input_data': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n    [262292]: Exiting, clearing model dict!\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n\n## Recap\n\nIn this guide we have defined a lifespan handler and passed it to our\nFastKafka app.\n\nSome important points are:\n\n1.  Lifespan handler is implemented as\n    [AsyncContextManager](https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager)\n2.  Code **before** yield in lifespan will be executed **before**\n    application **startup**\n3.  Code **after** yield in lifespan will be executed **after**\n    application **shutdown**\n4.  You can pass your lifespan handler to FastKafka app on\n    initialisation by passing a `lifespan` argument\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_06_Benchmarking_FastKafka.md",
    "content": "Benchmarking FastKafka app\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\nTo benchmark a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nproject, you will need the following:\n\n1.  A library built with\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka).\n2.  A running `Kafka` instance to benchmark the FastKafka application\n    against.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": 
{\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nhas a decorator for benchmarking which is appropriately called as\n`benchmark`. 
Let’s edit our `application.py` file and add the\n`benchmark` decorator to the consumes method.\n\n``` python\n# content of the \"application.py\" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", 
auto_offset_reset=\"latest\")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nHere we are conducting a benchmark of a function that consumes data from\nthe `input_data` topic with an interval of 1 second and a sliding window\nsize of 5.\n\nThis `benchmark` method uses the `interval` parameter to calculate the\nresults over a specific time period, and the `sliding_window_size`\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation.\n\nThis benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement.\n\n### Starting Kafka\n\nIf you already have a `Kafka` running somewhere, then you can skip this\nstep.\n\nPlease keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself.\n\n#### Installing Java and Kafka\n\nWe need a working `Kafka`instance to benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp, and to run `Kafka` we need `Java`. 
Thankfully,\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\ncomes with a CLI to install both `Java` and `Kafka` on our machine.\n\nSo, let’s install `Java` and `Kafka` by executing the following command.\n\n``` cmd\nfastkafka testing install_deps\n```\n\nThe above command will extract `Kafka` scripts at the location\n“\\$HOME/.local/kafka_2.13-3.3.2\" on your machine.\n\n#### Creating configuration for Zookeeper and Kafka\n\nNow we need to start `Zookeeper` and `Kafka` separately, and to start\nthem we need `zookeeper.properties` and `kafka.properties` files.\n\nLet’s create a folder inside the folder where `Kafka` scripts were\nextracted and change directory into it.\n\n``` cmd\nmkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n```\n\nLet’s create a file called `zookeeper.properties` and write the\nfollowing content to the file:\n\n``` txt\ndataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n```\n\nSimilarly, let’s create a file called `kafka.properties` and write the\nfollowing content to the file:\n\n``` txt\nbroker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n```\n\n#### Starting Zookeeper and Kafka\n\nWe need two different terminals to run `Zookeeper` in one and `Kafka` in\nanother. 
Let’s open a new terminal and run the following commands to\nstart `Zookeeper`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n```\n\nOnce `Zookeeper` is up and running, open a new terminal and execute the\nfollwing commands to start `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n```\n\nNow we have both `Zookeeper` and `Kafka` up and running.\n\n#### Creating topics in Kafka\n\nIn a new terminal, please execute the following command to create\nnecessary topics in `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n```\n\n#### Populating topics with dummy data\n\nTo benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp, we need some data in `Kafka` topics.\n\nIn the same terminal, let’s create some dummy data:\n\n``` cmd\nyes '{\"sepal_length\": 0.7739560486, \"sepal_width\": 0.8636615789, \"petal_length\": 0.6122663046, \"petal_width\": 0.1338914722}' | head -n 1000000 > /tmp/test_data\n```\n\nThis command will create a file called `test_data` in the `tmp` folder\nwith one million rows of text. This will act as dummy data to populate\nthe `input_data` topic.\n\nLet’s populate the created topic `input_data` with the dummy data which\nwe created above:\n\n``` cmd\n./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n```\n\nNow our topic `input_data` has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again.\n\n### Benchmarking FastKafka\n\nOnce `Zookeeper` and `Kafka` are ready, benchmarking\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp is as simple as running the `fastkafka run` command:\n\n``` cmd\nfastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n```\n\nThis command will start the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp and begin consuming messages from `Kafka`, which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the `interval` and `sliding_window_size` values.\n\nThe output for the `fastkafka run` command is:\n\n``` txt\n[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 
23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n```\n\nBased on the output, when using 1 worker, our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp achieved a `throughput` of 93k messages per second and an\n`average throughput` of 93k messages per second.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",
    "content": "Encoding and Decoding Kafka Messages with FastKafka\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\n1.  A basic knowledge of\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    is needed to proceed with this guide. If you are not familiar with\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka),\n    please go through the [tutorial](/docs#tutorial) first.\n2.  [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    with its dependencies installed is needed. Please install\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    using the command - `pip install fastkafka`\n\n## Ways to Encode and Decode Messages with FastKafka\n\nIn python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. Similarly, while consuming messages, we consume them as bytes and\nthen convert them to strings.\n\nIn FastKafka, we specify message schema using Pydantic models as\nmentioned in [tutorial](/docs#messages):\n\n``` python\n# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nThen, we send and receive messages as instances of Pydantic models which\nwe defined. 
So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics.\n\nThe `@consumes` and `@produces` methods of FastKafka accept a parameter\ncalled `decoder`/`encoder` to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:\n\n1.  json - This is the default encoder/decoder option in FastKafka.\n    While producing, this option converts our instance of Pydantic model\n    messages to a JSON string and then converts it to bytes before\n    sending it to the topic. While consuming, it converts bytes to a\n    JSON string and then constructs an instance of Pydantic model from\n    the JSON string.\n2.  avro - This option uses Avro encoding/decoding to convert instances\n    of Pydantic model messages to bytes while producing, and while\n    consuming, it constructs an instance of Pydantic model from bytes.\n3.  custom encoder/decoder - If you are not happy with the json or avro\n    encoder/decoder options, you can write your own encoder/decoder\n    functions and use them to encode/decode Pydantic messages.\n\n## 1. Json encoder and decoder\n\nThe default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string.\n\nWe can use the application from [tutorial](/docs#running-the-service) as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder\nparameter:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"json\")\nasync def 
on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"json\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above code, the `@kafka_app.consumes` decorator sets up a\nconsumer for the “input_data\" topic, using the ‘json’ decoder to convert\nthe message payload to an instance of `IrisInputData`. The\n`@kafka_app.produces` decorator sets up a producer for the “predictions\"\ntopic, using the ‘json’ encoder to convert the instance of\n`IrisPrediction` to message payload.\n\n## 2. Avro encoder and decoder\n\n### What is Avro?\n\nAvro is a row-oriented remote procedure call and data serialization\nframework developed within Apache’s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n[docs](https://avro.apache.org/docs/).\n\n### Installing FastKafka with Avro dependencies\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nwith Avro support using the command - `pip install fastkafka[avro]`\n\n### Defining Avro Schema Using Pydantic Models\n\nBy default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models.\n\nSo, similar to the [tutorial](/docs#tutorial), the message schema will\nremain as it is.\n\n``` python\n# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nNo need to change anything to support avro. 
You can use existing\nPydantic models as is.\n\n### Reusing existing avro schema\n\nIf you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined.\n\n#### Building pydantic models from avro schema dictionary\n\nLet’s modify the above example and let’s assume we have schemas already\nfor `IrisInputData` and `IrisPrediction` which will look like below:\n\n``` python\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": \"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n```\n\nWe can easily construct pydantic models from avro schema using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction which is included as part of\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nitself.\n\n``` python\nfrom fastkafka._components.encoder.avro import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\nThe above code will convert avro schema to pydantic models and will\nprint pydantic models’ fields. 
The output of the above is:\n\n``` txt\n{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n```\n\nThis is exactly same as manually defining the pydantic models ourselves.\nYou don’t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction as demonstrated above.\n\n#### Building pydantic models from `.avsc` file\n\nNot all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary `.avsc` files in\nfilesystem. Let’s see how to convert those `.avsc` files to pydantic\nmodels.\n\nLet’s assume our avro files are stored in files called\n`iris_input_data_schema.avsc` and `iris_prediction_schema.avsc`. In that\ncase, following code converts the schema to pydantic models:\n\n``` python\nimport json\nfrom fastkafka._components.encoder.avro import avsc_to_pydantic\n\n\nwith open(\"iris_input_data_schema.avsc\", \"rb\") as f:\n    iris_input_data_schema = json.load(f)\n    \nwith open(\"iris_prediction_schema.avsc\", \"rb\") as f:\n    iris_prediction_schema = json.load(f)\n    \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\n### Consume/Produce avro messages with FastKafka\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nprovides `@consumes` and `@produces` methods to consume/produces\nmessages to/from a `Kafka` topic. 
This is explained in\n[tutorial](/docs#function-decorators).\n\nThe `@consumes` and `@produces` methods accept a parameter called\n`decoder`/`encoder` to decode/encode avro messages.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above example, in `@consumes` and `@produces` methods, we\nexplicitly instruct FastKafka to `decode` and `encode` messages using\nthe `avro` `decoder`/`encoder` instead of the default `json`\n`decoder`/`encoder`.\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use `avro` to `decode` and\n`encode` messages:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": 
\"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka._components.encoder.avro import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThe above code is a sample implementation of using FastKafka to 
consume\nand produce Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages.\n\nThe\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is then instantiated with the broker details, and two functions\ndecorated with `@kafka_app.consumes` and `@kafka_app.produces` are\ndefined to consume messages from the “input_data\" topic and produce\nmessages to the “predictions\" topic, respectively. The functions uses\nthe decoder=“avro\" and encoder=“avro\" parameters to decode and encode\nthe Avro messages.\n\nIn summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline.\n\n## 3. 
Custom encoder and decoder\n\nIf you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages.\n\n### Writing a custom encoder and decoder\n\nIn this section, let’s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n[ROT13](https://en.wikipedia.org/wiki/ROT13) cipher.\n\n``` python\nimport codecs\nimport json\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n```\n\nThe above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library.\n\nThe encoding function, `custom_encoder()`, takes a message `msg` which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe `json()` method, obfuscates the resulting string using the ROT13\nalgorithm from the `codecs` module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding.\n\nThe decoding function, `custom_decoder()`, takes a raw message `raw_msg`\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. 
Finally, it loads\nthe resulting JSON string using the `json.loads()` method and returns a\nnew instance of the specified `cls` class initialized with the decoded\ndictionary.\n\nThese functions can be used with FastKafka’s `encoder` and `decoder`\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics.\n\nLet’s test the above code\n\n``` python\ni = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n```\n\nThis will result in following output\n\n``` txt\nb'{\"frcny_yratgu\": 0.5, \"frcny_jvqgu\": 0.5, \"crgny_yratgu\": 0.5, \"crgny_jvqgu\": 0.5}'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n```\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use our custom decoder and\nencoder functions:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length 
in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\n\nimport codecs\nimport json\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThis code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka 
messaging system.\n\nThe custom `encoder` function takes a message represented as a\n`BaseModel` and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned.\n\nThe custom `decoder` function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the `json`\nmodule. This dictionary is then converted to a `BaseModel` instance\nusing the cls parameter.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_11_Consumes_Basics.md",
    "content": "@consumes basics\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@consumes` decorator to consume messages from Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will consume\n`HelloWorld` messages from hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n\nTo use the `@consumes` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to\nconsume from the topic using [pydantic](https://docs.pydantic.dev/). For\nthe guide we’ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a consumer function and decorate it with `@consumes`\n\nLet’s create a consumer function that will consume `HelloWorld` messages\nfrom *hello_world* topic and log 
them.\n\n``` python\n\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nThe function decorated with the `@consumes` decorator will be called\nwhen a message is produced to Kafka.\n\nThe message will then be injected into the typed *msg* argument of the\nfunction and its type will be used to parse the message.\n\nIn this example case, when the message is sent into a *hello_world*\ntopic, it will be parsed into a HelloWorld class and `on_hello_world`\nfunction will be called with the parsed class as *msg* argument value.\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [382372]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [382372]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [382372]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [382372]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    [382372]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 0}. 
\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 382372...\n    [382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 382372 terminated.\n\n## Send the message to kafka topic\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are\nreceiving the message from, this is because the `@consumes` decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default “on\\_\" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n*hello_world*.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nconsumes decorator, like this:\n\n``` python\n\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes(prefix=\"read_from_\")\nasync def read_from_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nAlso, you can define the topic name completely by defining the `topic`\nparameter in the consumes decorator, like this:\n\n``` python\n\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes(topic=\"my_special_topic\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\n## Message data\n\nThe message received from kafka is translated from binary JSON\nrepresentation into the class defined by typing of *msg* parameter in the\nfunction decorated by the `@consumes` decorator.\n\nIn this example case, the message will be parsed into a `HelloWorld`\nclass.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_21_Produces_Basics.md",
    "content": "@produces basics\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@produces` decorator to produce messages to Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n\nTo use the `@produces` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to send\nto the topic using [pydantic](https://docs.pydantic.dev/). For the guide\nwe’ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a producer function and decorate it with `@produces`\n\nLet’s create a producer function that will produce `HelloWorld` messages\nto *hello_world* topic:\n\n``` 
python\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nNow you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic.\n\nBy default, the topic is determined from your function name, the “to\_\"\nprefix is stripped and what is left over is used as a topic name. In this\ncase, that is *hello_world*.\n\n## Instruct the app to start sending HelloWorld messages\n\nLet’s use `@run_in_background` decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second.\n\n``` python\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\n``` python\nscript_file = \"producer_example.py\"\ncmd = \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\"\nmd(\n    f\"Now we can run the app. 
Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```\"\n)\n```\n\nNow we can run the app. Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n\n## Check if the message was sent to the Kafka topic\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the {“msg\": “Hello world!\"} messages in your topic.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are sending\nthe message to, this is because the `@produces` decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default “to\_\" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n*hello_world*.\n\n!!! warn \"New topics\"\n\n    Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nproduces decorator, like this:\n\n``` python\n\n@app.produces(prefix=\"send_to_\")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nAlso, you can define the topic name completely by defining the `topic`\nparameter in the produces decorator, like this:\n\n``` python\n\n@app.produces(topic=\"my_special_topic\")\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\n## Message data\n\nThe return value from your function will be translated to a JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app.\n\nIn this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. In the form of:\n`b'{\"msg\": \"Hello world!\"}'`\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_22_Partition_Keys.md",
    "content": "Defining a partition key\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nPartition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance.\n\nYou can define your partition keys when using the `@produces` decorator,\nthis guide will demonstrate to you this feature.\n\n## Return a key from the producing function\n\nTo define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass and set the key value. Check the example below:\n\n``` python\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n```\n\nIn the example, we want to return the `HelloWorld` message class with\nthe key defined as *my_key*. So, we wrap the message and key into a\nKafkaEvent class and return it as such.\n\nWhile generating the documentation, the\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass will be unwrapped and the `HelloWorld` class will be documented in\nthe definition of message type, same way if you didn’t use the key.\n\n!!! info \"Which key to choose?\"\n\n    Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. 
Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n\n## App example\n\nWe will modify the app example from **@producer basics** guide to return\nthe `HelloWorld` with our key. The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n\n## Check if the message was sent to the Kafka topic with the desired key\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic with the defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the *my_key {“msg\": “Hello world!\"}* messages in your\ntopic appearing, the *my_key* part of the message is the key that we\ndefined in our producing function.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",
    "content": "Deploying FastKafka using Docker\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Building a Docker Image\n\nTo build a Docker image for a FastKafka project, we need the following\nitems:\n\n1.  A library that is built using FastKafka.\n2.  A file in which the requirements are specified. This could be a\n    requirements.txt file, a setup.py file, or even a wheel file.\n3.  A Dockerfile to build an image that will include the two files\n    mentioned above.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom 
fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Creating requirements.txt file\n\nThe above code only requires `fastkafka`. So, we will add only\n`fastkafka` to the `requirements.txt` file, but you can add additional\nrequirements to it as well.\n\n``` txt\nfastkafka>=0.3.0\n```\n\nHere we are using `requirements.txt` to store the project’s\ndependencies. However, other methods like `setup.py`, `pipenv`, and\n`wheel` files can also be used. `setup.py` is commonly used for\npackaging and distributing Python modules, while `pipenv` is a tool used\nfor managing virtual environments and package dependencies. 
`wheel`\nfiles are built distributions of Python packages that can be installed\nwith pip.\n\n### Creating Dockerfile\n\n``` dockerfile\n# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD [\"fastkafka\", \"run\", \"--num-workers\", \"2\", \"--kafka-broker\", \"production\", \"application:kafka_app\"]\n```\n\n1.  Start from the official Python base image.\n\n2.  Set the current working directory to `/project`.\n\n    This is where we’ll put the `requirements.txt` file and the\n    `application.py` file.\n\n3.  Copy the `application.py` file and `requirements.txt` file inside\n    the `/project` directory.\n\n4.  Install the package dependencies in the requirements file.\n\n    The `--no-cache-dir` option tells `pip` to not save the downloaded\n    packages locally, as that is only if `pip` was going to be run again\n    to install the same packages, but that’s not the case when working\n    with containers.\n\n    The `--upgrade` option tells `pip` to upgrade the packages if they\n    are already installed.\n\n5.  Set the **command** to run the `fastkafka run` command.\n\n    `CMD` takes a list of strings, each of these strings is what you\n    would type in the command line separated by spaces.\n\n    This command will be run from the **current working directory**, the\n    same `/project` directory you set above with `WORKDIR /project`.\n\n    We supply additional parameters `--num-workers` and `--kafka-broker`\n    for the run command. Finally, we specify the location of our\n    `fastkafka` application location as a command argument.\n\n    To learn more about `fastkafka run` command please check the [CLI\n    docs](../../cli/fastkafka/#fastkafka-run).\n\n### Build the Docker Image\n\nNow that all the files are in place, let’s build the container image.\n\n1.  
Go to the project directory (where your `Dockerfile` is, containing\n    your `application.py` file).\n\n2.  Run the following command to build the image:\n\n    ``` cmd\n    docker build -t fastkafka_project_image .\n    ```\n\n    This command will create a docker image with the name\n    `fastkafka_project_image` and the `latest` tag.\n\nThat’s it! You have now built a docker image for your FastKafka project.\n\n### Start the Docker Container\n\nRun a container based on the built image:\n\n``` cmd\ndocker run -d --name fastkafka_project_container fastkafka_project_image\n```\n\n## Additional Security\n\n`Trivy` is an open-source tool that scans Docker images for\nvulnerabilities. It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here’s\nhow you can use `trivy` to scan your `fastkafka_project_image`:\n\n1.  Install `trivy` on your local machine by following the instructions\n    provided in the [official `trivy`\n    documentation](https://aquasecurity.github.io/trivy/latest/getting-started/installation/).\n\n2.  Run the following command to scan your fastkafka_project_image:\n\n    ``` cmd\n    trivy image fastkafka_project_image\n    ```\n\n    This command will scan your `fastkafka_project_image` for any\n    vulnerabilities and provide you with a report of its findings.\n\n3.  Fix any vulnerabilities identified by `trivy`. You can do this by\n    updating the vulnerable package to a more secure version or by using\n    a different package altogether.\n\n4.  
Rebuild your `fastkafka_project_image` and repeat steps 2 and 3\n    until `trivy` reports no vulnerabilities.\n\nBy using `trivy` to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities.\n\n## Example repo\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n[here](https://github.com/airtai/sample_fastkafka_project/)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",
    "content": "Using Redpanda to test FastKafka\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## What is FastKafka?\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## What is Redpanda?\n\nRedpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda.\n\nFrom [redpanda.com](https://redpanda.com/):\n\n> Redpanda is a Kafka®-compatible streaming data platform that is proven\n> to be 10x faster and 6x lower in total costs. It is also JVM-free,\n> ZooKeeper®-free, Jepsen-tested and source available.\n\nSome of the advantages of Redpanda over Kafka are\n\n1.  A single binary with built-in everything, no ZooKeeper® or JVM\n    needed.\n2.  Costs upto 6X less than Kafka.\n3.  
Up to 10x lower average latencies and up to 6x faster Kafka\n    transactions without compromising correctness.\n\nTo learn more about Redpanda, please visit their\n[website](https://redpanda.com/) or checkout this [blog\npost](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\ncomparing Redpanda and Kafka’s performance benchmarks.\n\n## Example repo\n\nA sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n[here](https://github.com/airtai/sample_fastkafka_with_redpanda).\n\n## The process\n\nHere are the steps we’ll be walking through to build our example:\n\n1.  Set up the prerequisites.\n2.  Clone the example repo.\n3.  Explain how to write an application using FastKafka.\n4.  Explain how to write a test case to test FastKafka with Redpanda.\n5.  Run the test case and produce/consume messages.\n\n## 1. Prerequisites\n\nBefore starting, make sure you have the following prerequisites set up:\n\n1.  **Python 3.x**: A Python 3.x installation is required to run\n    FastKafka. You can download the latest version of Python from the\n    [official website](https://www.python.org/downloads/). You’ll also\n    need to have pip installed and updated, which is Python’s package\n    installer.\n2.  **Docker Desktop**: Docker is used to run Redpanda, which is\n    required for testing FastKafka. You can download and install Docker\n    Desktop from the [official\n    website](https://www.docker.com/products/docker-desktop/).\n3.  **Git**: You’ll need to have Git installed to clone the example\n    repo. You can download Git from the [official\n    website](https://git-scm.com/downloads).\n\n## 2. 
Cloning and setting up the example repo\n\nTo get started with the example code, clone the [GitHub\nrepository](https://github.com/airtai/sample_fastkafka_with_redpanda) by\nrunning the following command in your terminal:\n\n``` cmd\ngit clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n```\n\nThis will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files.\n\n### Create a virtual environment\n\nBefore writing any code, let’s [create a new virtual\nenvironment](https://docs.python.org/3/library/venv.html#module-venv)\nfor our project.\n\nA virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between different projects.\n\nTo create a new virtual environment, run the following commands in your\nterminal:\n\n``` cmd\npython3 -m venv venv\n```\n\nThis will create a new directory called `venv` in your project\ndirectory, which will contain the virtual environment.\n\nTo activate the virtual environment, run the following command:\n\n``` cmd\nsource venv/bin/activate\n```\n\nThis will change your shell’s prompt to indicate that you are now\nworking inside the virtual environment.\n\nFinally, run the following command to upgrade `pip`, the Python package\ninstaller:\n\n``` cmd\npip install --upgrade pip\n```\n\n### Install Python dependencies\n\nNext, let’s install the required Python dependencies. In this guide,\nwe’ll be using\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nto write our application code and `pytest` and `pytest-asyncio` to test\nit.\n\nYou can install the dependencies from the `requirements.txt` file\nprovided in the cloned repository by running:\n\n``` cmd\npip install -r requirements.txt\n```\n\nThis will install all the required packages and their dependencies.\n\n## 3. 
Writing server code\n\nThe `application.py` file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. 
It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker.\n\nNext, an instance of the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum required arguments:\n\n- `kafka_brokers`: a dictionary used for generating documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing the Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## 4. Writing the test code\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstance which can be configured to start a [Redpanda\nbroker](../../api/fastkafka/testing/LocalRedpandaBroker/) for testing\npurposes. 
The `test.py` file in the cloned repository contains the\nfollowing code for testing.\n\n``` python\nimport pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n    # Start Tester app and create local Redpanda broker for testing\n    async with Tester(kafka_app).using_local_redpanda(\n        tag=\"v23.1.2\", listener_port=9092\n    ) as tester:\n        # Send IrisInputData message to input_data topic\n        await tester.to_input_data(msg)\n\n        # Assert that the kafka_app responded with IrisPrediction in predictions topic\n        await tester.awaited_mocks.on_predictions.assert_awaited_with(\n            IrisPrediction(species=\"setosa\"), timeout=2\n        )\n```\n\nThe\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\nmodule utilizes uses\n[`LocalRedpandaBroker`](../api/fastkafka/testing/LocalRedpandaBroker.md/#fastkafka.testing.LocalRedpandaBroker)\nto start and stop a Redpanda broker for testing purposes using Docker\n\n## 5. Running the tests\n\nWe can run the tests which is in `test.py` file by executing the\nfollowing command:\n\n``` cmd\npytest test.py\n```\n\nThis will start a Redpanda broker using Docker and executes tests. The\noutput of the command is:\n\n``` cmd\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item                                                                 \n\ntest.py .                                                                  
[100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n```\n\nRunning the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable.\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napplication. The app will consume the `IrisInputData` from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our\n    [`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\n    class with `Redpanda` broker which mirrors the developed app topics\n    for testing purposes\n\n3.  Sent `IrisInputData` message to `input_data` topic\n\n4.  Asserted and checked that the developed iris classification service\n    has reacted to `IrisInputData` message\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/index.md",
    "content": "FastKafka\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n<b>Effortless Kafka integration for your web services</b>\n\n------------------------------------------------------------------------\n\n![PyPI](https://img.shields.io/pypi/v/fastkafka.png) ![PyPI -\nDownloads](https://img.shields.io/pypi/dm/fastkafka.png) ![PyPI - Python\nVersion](https://img.shields.io/pypi/pyversions/fastkafka.png)\n\n![GitHub Workflow\nStatus](https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml)\n![CodeQL](https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg)\n![Dependency\nReview](https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg)\n\n![GitHub](https://img.shields.io/github/license/airtai/fastkafka.png)\n\n------------------------------------------------------------------------\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n------------------------------------------------------------------------\n\n#### ⭐⭐⭐ Stay in touch ⭐⭐⭐\n\nPlease show your support and stay in touch by:\n\n- giving our [GitHub repository](https://github.com/airtai/fastkafka/) a\n  star, and\n\n- joining our [Discord server](https://discord.gg/CJWmYpyFbc).\n\nYour support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. Thank you for your\nsupport!\n\n------------------------------------------------------------------------\n\n#### 🐝🐝🐝 We were busy lately 🐝🐝🐝\n\n![Activity](https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg \"Repobeats analytics image\")\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of `fastkafka` with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\nTo install fastkafka with testing features please use:\n\n``` sh\npip install fastkafka[test]\n```\n\nTo install fastkafka with asyncapi docs please use:\n\n``` sh\npip install fastkafka[docs]\n```\n\nTo install fastkafka with all the features please use:\n\n``` sh\npip install fastkafka[test,docs]\n```\n\n## Tutorial\n\nYou can start an interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" 
target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n</a>\n\n## Writing server code\n\nHere is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic.\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. 
It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker.\n\nNext, an object of the\n[`FastKafka`](./api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing the Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](./api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstance which internally starts an InMemory implementation of Kafka\nbroker.\n\nThe Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies.\n\n``` python\nfrom fastkafka.testing import 
Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send IrisInputData message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with IrisPrediction in predictions topic\n    await tester.awaited_mocks.on_predictions.assert_awaited_with(\n        IrisPrediction(species=\"setosa\"), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: 
['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent IrisInputData message to `input_data` topic\n\n4.  Asserted and checked that the developed iris classification service\n    has reacted to IrisInputData message\n\n## Running the service\n\nThe service can be started using builtin fastkafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., 
example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nTo run the service, you will need a running Kafka broker on localhost as\nspecified in the `kafka_brokers` parameter above. We can start the Kafka\nbroker locally using the\n[`ApacheKafkaBroker`](./api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker).\n\nTo use\n[`ApacheKafkaBroker`](./api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker),\nyou need to install JRE and Kafka to your environment. 
To simplify this\nprocess, fastkafka comes with a CLI command that does just that, to run\nit, in your terminal execute the following:\n\n``` sh\nfastkafka testing install_deps\n```\n\nNow we can run\n[`ApacheKafkaBroker`](./api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker)\nthat will start a Kafka broker instance for us.\n\n``` python\nfrom fastkafka.testing import ApacheKafkaBroker\n\nbroker = ApacheKafkaBroker(apply_nest_asyncio=True)\n\nbroker.start()\n```\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n    '127.0.0.1:9092'\n\nThen, we start the FastKafka service by running the following command in\nthe folder where the `application.py` file is located:\n\n``` sh\nfastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n```\n\nIn the above command, we use `--num-workers` option to specify how 
many\nworkers to launch and we use `--kafka-broker` option to specify which\nkafka broker configuration to use from earlier specified `kafka_brokers`\n\n    [801767]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [801765]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [801767]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [801765]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [801767]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [801767]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [801765]: [INFO] aiokafka.consumer.subscription_state: Updating 
subscribed topics to: frozenset({'input_data'})\n    [801765]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [801765]: [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n    [801765]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [801767]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [801767]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [801767]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [801765]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [801767]: [ERROR] aiokafka: Unable to update metadata from [0]\n    [801765]: [ERROR] aiokafka: Unable to update metadata from [0]\n    ^C\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801765...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801767...\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [801767]: 
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n\nYou need to interrupt running of the cell above by selecting\n`Runtime->Interrupt execution` on the toolbar above.\n\nFinally, we can stop the local Kafka Broker:\n\n``` python\nbroker.stop()\n```\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 801303...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 801303 was already terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 800930...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 800930 was already terminated.\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nAsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programmatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n\n. 
This will generate the *asyncapi* folder in a relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 09:14 docs\n    drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 09:14 spec\n\nIn the docs folder you will find the servable static html file of your\ndocumentation. This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn the spec folder you will find an asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! 
✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    ^C\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation below:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippets are for the consumer as specified in\nthe code above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippets are for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n\n## License\n\nFastKafka is licensed under the Apache License 2.0\n\nA permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. 
Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code.\n\nThe full text of the license can be found\n[here](https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/overrides/css/extra.css",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/overrides/js/extra.js",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/overrides/js/math.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.5.0/overrides/js/mathjax.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/CHANGELOG.md",
    "content": "# Release notes\n\n<!-- do not remove -->\n\n## 0.6.0\n\n### New Features\n\n- Timestamps added to CLI commands ([#283](https://github.com/airtai/fastkafka/pull/283)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Added option to process messages concurrently ([#278](https://github.com/airtai/fastkafka/pull/278)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - A new `executor` option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.\n\n- Add consumes and produces functions to app ([#274](https://github.com/airtai/fastkafka/pull/274)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Add batching for producers ([#273](https://github.com/airtai/fastkafka/pull/273)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(batch): batch support is a real need! and i see it on the issue list.... so hope we do not need to wait too long\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Fix broken links in guides ([#272](https://github.com/airtai/fastkafka/pull/272)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Generate the docusaurus sidebar dynamically by parsing summary.md ([#270](https://github.com/airtai/fastkafka/pull/270)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Metadata passed to consumer ([#269](https://github.com/airtai/fastkafka/pull/269)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(key): read the key value somehow..Maybe I missed something in the docs\n    requirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD operations.\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Contribution with instructions how to build and test added 
([#255](https://github.com/airtai/fastkafka/pull/255)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Export encoders, decoders from fastkafka.encoder ([#246](https://github.com/airtai/fastkafka/pull/246)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n- Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. ([#239](https://github.com/airtai/fastkafka/issues/239))\n\n\n- UI Improvement: Post screenshots with links to the actual messages in testimonials section ([#228](https://github.com/airtai/fastkafka/issues/228))\n\n### Bugs Squashed\n\n- Batch testing fix ([#280](https://github.com/airtai/fastkafka/pull/280)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Tester breaks when using Batching or KafkaEvent producers ([#279](https://github.com/airtai/fastkafka/issues/279))\n\n- Consumer loop callbacks are not executing in parallel ([#276](https://github.com/airtai/fastkafka/issues/276))\n\n\n## 0.5.0\n\n### New Features\n\n- Significant speedup of Kafka producer ([#236](https://github.com/airtai/fastkafka/pull/236)), thanks to [@Sternakt](https://github.com/Sternakt)\n \n\n- Added support for AVRO encoding/decoding ([#231](https://github.com/airtai/fastkafka/pull/231)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n### Bugs Squashed\n\n- Fixed sidebar to include guides in docusaurus documentation ([#238](https://github.com/airtai/fastkafka/pull/238)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Fixed link to symbols in docusaurus docs ([#227](https://github.com/airtai/fastkafka/pull/227)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Removed bootstrap servers from constructor ([#220](https://github.com/airtai/fastkafka/pull/220)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n## 0.4.0\n\n### New Features\n\n- Integrate fastkafka chat ([#208](https://github.com/airtai/fastkafka/pull/208)), thanks to 
[@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add benchmarking ([#206](https://github.com/airtai/fastkafka/pull/206)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Enable fast testing without running kafka locally ([#198](https://github.com/airtai/fastkafka/pull/198)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Generate docs using Docusaurus ([#194](https://github.com/airtai/fastkafka/pull/194)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add test cases for LocalRedpandaBroker ([#189](https://github.com/airtai/fastkafka/pull/189)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Reimplement patch and delegates from fastcore ([#188](https://github.com/airtai/fastkafka/pull/188)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Rename existing functions into start and stop and add lifespan handler ([#117](https://github.com/airtai/fastkafka/issues/117))\n  - https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios\n\n\n## 0.3.1\n\n-  README.md file updated\n\n\n## 0.3.0\n\n### New Features\n\n- Guide for fastkafka produces using partition key ([#172](https://github.com/airtai/fastkafka/pull/172)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #161\n\n- Add support for Redpanda for testing and deployment ([#181](https://github.com/airtai/fastkafka/pull/181)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Remove bootstrap_servers from __init__ and use the name of broker as an option when running/testing ([#134](https://github.com/airtai/fastkafka/issues/134))\n\n- Add a GH action file to check for broken links in the docs ([#163](https://github.com/airtai/fastkafka/issues/163))\n\n- Optimize requirements for testing and docs ([#151](https://github.com/airtai/fastkafka/issues/151))\n\n- Break requirements into base and optional for testing and dev 
([#124](https://github.com/airtai/fastkafka/issues/124))\n  - Minimize base requirements needed just for running the service.\n\n- Add link to example git repo into guide for building docs using actions ([#81](https://github.com/airtai/fastkafka/issues/81))\n\n- Add logging for run_in_background ([#46](https://github.com/airtai/fastkafka/issues/46))\n\n- Implement partition Key mechanism for producers ([#16](https://github.com/airtai/fastkafka/issues/16))\n\n### Bugs Squashed\n\n- Implement checks for npm installation and version ([#176](https://github.com/airtai/fastkafka/pull/176)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #158 by checking if the npx is installed and more verbose error handling\n\n- Fix the helper.py link in CHANGELOG.md ([#165](https://github.com/airtai/fastkafka/issues/165))\n\n- fastkafka docs install_deps fails ([#157](https://github.com/airtai/fastkafka/issues/157))\n  - Unexpected internal error: [Errno 2] No such file or directory: 'npx'\n\n- Broken links in docs ([#141](https://github.com/airtai/fastkafka/issues/141))\n\n- fastkafka run is not showing up in CLI docs ([#132](https://github.com/airtai/fastkafka/issues/132))\n\n\n## 0.2.3\n\n- Fixed broken links on PyPi index page\n\n\n## 0.2.2\n\n### New Features\n\n- Extract JDK and Kafka installation out of LocalKafkaBroker ([#131](https://github.com/airtai/fastkafka/issues/131))\n\n- PyYAML version relaxed ([#119](https://github.com/airtai/fastkafka/pull/119)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Replace docker based kafka with local ([#68](https://github.com/airtai/fastkafka/issues/68))\n  - [x] replace docker compose with a simple docker run (standard run_jupyter.sh should do)\n  - [x] replace all tests to use LocalKafkaBroker\n  - [x] update documentation\n\n### Bugs Squashed\n\n- Fix broken link for FastKafka docs in index notebook ([#145](https://github.com/airtai/fastkafka/issues/145))\n\n- Fix encoding issues when loading setup.py on 
windows OS ([#135](https://github.com/airtai/fastkafka/issues/135))\n\n\n## 0.2.0\n\n### New Features\n\n- Replace kafka container with LocalKafkaBroker ([#112](https://github.com/airtai/fastkafka/issues/112))\n  - - [x] Replace kafka container with LocalKafkaBroker in tests\n- [x] Remove kafka container from tests environment\n- [x] Fix failing tests\n\n### Bugs Squashed\n\n- Fix random failing in CI ([#109](https://github.com/airtai/fastkafka/issues/109))\n\n\n## 0.1.3\n\n- version update in __init__.py\n\n\n## 0.1.2\n\n### New Features\n\n\n- Git workflow action for publishing Kafka docs ([#78](https://github.com/airtai/fastkafka/issues/78))\n\n\n### Bugs Squashed\n\n- Include missing requirement ([#110](https://github.com/airtai/fastkafka/issues/110))\n  - [x] Typer is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py) but it is not included in [settings.ini](https://github.com/airtai/fastkafka/blob/main/settings.ini)\n  - [x] Add aiohttp which is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py)\n  - [x] Add nbformat which is imported in _components/helpers.py\n  - [x] Add nbconvert which is imported in _components/helpers.py\n\n\n## 0.1.1\n\n\n### Bugs Squashed\n\n- JDK install fails on Python 3.8 ([#106](https://github.com/airtai/fastkafka/issues/106))\n\n\n\n## 0.1.0\n\nInitial release\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/CNAME",
    "content": "fastkafka.airt.ai\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/CONTRIBUTING.md",
    "content": "# Contributing to fastkafka\n\nFirst off, thanks for taking the time to contribute! ❤️\n\nAll types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉\n\n> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:\n> - Star the project\n> - Tweet about it\n> - Refer this project in your project's readme\n> - Mention the project at local meetups and tell your friends/colleagues\n\n## Table of Contents\n\n- [I Have a Question](#i-have-a-question)\n- [I Want To Contribute](#i-want-to-contribute)\n  - [Reporting Bugs](#reporting-bugs)\n  - [Suggesting Enhancements](#suggesting-enhancements)\n- [Development](#development)\n    - [Prepare the dev environment](#prepare-the-dev-environment)\n    - [Way of working](#way-of-working)\n    - [Before a PR](#before-a-pr)\n\n\n\n## I Have a Question\n\n> If you want to ask a question, we assume that you have read the available [Documentation](https://fastkafka.airt.ai/docs).\n\nBefore you ask a question, it is best to search for existing [Issues](https://github.com/airtai/fastkafka/issues) that might help you. 
In case you have found a suitable issue and still need clarification, you can write your question in this issue.\n\nIf you then still feel the need to ask a question and need clarification, we recommend the following:\n\n- Contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new)\n    - Provide as much context as you can about what you're running into\n\nWe will then take care of the issue as soon as possible.\n\n## I Want To Contribute\n\n> ### Legal Notice \n> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.\n\n### Reporting Bugs\n\n#### Before Submitting a Bug Report\n\nA good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.\n\n- Make sure that you are using the latest version.\n- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](https://fastkafka.airt.ai/docs). 
If you are looking for support, you might want to check [this section](#i-have-a-question)).\n- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/airtai/fastkafka/issues?q=label%3Abug).\n- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.\n- Collect information about the bug:\n  - Stack trace (Traceback)\n  - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)\n  - Python version\n  - Possibly your input and the output\n  - Can you reliably reproduce the issue? And can you also reproduce it with older versions?\n\n#### How Do I Submit a Good Bug Report?\n\nWe use GitHub issues to track bugs and errors. If you run into an issue with the project:\n\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)\n- Explain the behavior you would expect and the actual behavior.\n- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.\n- Provide the information you collected in the previous section.\n\nOnce it's filed:\n\n- The project team will label the issue accordingly.\n- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. 
Bugs with the `needs-repro` tag will not be addressed until they are reproduced.\n- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented.\n\n### Suggesting Enhancements\n\nThis section guides you through submitting an enhancement suggestion for fastkafka, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.\n\n#### Before Submitting an Enhancement\n\n- Make sure that you are using the latest version.\n- Read the [documentation](https://fastkafka.airt.ai/docs) carefully and find out if the functionality is already covered, maybe by an individual configuration.\n- Perform a [search](https://github.com/airtai/fastkafka/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.\n- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library.\n- If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n\n#### How Do I Submit a Good Enhancement Suggestion?\n\nEnhancement suggestions are tracked as [GitHub issues](https://github.com/airtai/fastkafka/issues).\n\n- Use a **clear and descriptive title** for the issue to identify the suggestion.\n- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.\n- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.\n- **Explain why this enhancement would be useful** to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.\n\n## Development\n\n### Prepare the dev environment\n\nTo start contributing to fastkafka, you first have to prepare the development environment.\n\n#### Clone the fastkafka repository\n\nTo clone the repository, run the following command in the CLI:\n\n```shell\ngit clone https://github.com/airtai/fastkafka.git\n```\n\n#### Optional: create a virtual python environment\n\nTo prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your fastkafka project by running:\n\n```shell\npython3 -m venv fastkafka-env\n```\n\nAnd to activate your virtual environment run:\n\n```shell\nsource fastkafka-env/bin/activate\n```\n\nTo learn more about virtual environments, please have a look at [official python documentation](https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available.)\n\n#### Install fastkafka\n\nTo install fastkafka, navigate to the root directory of the cloned fastkafka 
project and run:\n\n```shell\npip install -e \".[dev]\"\n```\n\n#### Install JRE and Kafka toolkit\n\nTo be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:\n\n1. Use our `fastkafka testing install-deps` CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR\n2. Install JRE and Kafka manually.\n   To do this, please refer to [JDK and JRE installation guide](https://docs.oracle.com/javase/9/install/toc.htm) and [Apache Kafka quickstart](https://kafka.apache.org/quickstart)\n   \n#### Install npm\n\nTo be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:\n\n1. Use our `fastkafka docs install_deps` CLI command which will install npm for you in your .local folder\nOR\n2. Install npm manually.\n   To do this, please refer to [NPM installation guide](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)\n   \n#### Install docusaurus\n\nTo generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project.\n\n#### Check if everything works\n\nAfter installing fastkafka and all the necessary dependencies, run `nbdev_test` in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everything is set up correctly, you will get a \"Success.\" message in your terminal, otherwise please refer to previous steps.\n\n### Way of working\n\nThe development of fastkafka is done in Jupyter notebooks. 
Inside the `nbs` directory you will find all the source code of fastkafka, this is where you will implement your changes.\n\nThe testing, cleanup and exporting of the code is being handled by `nbdev`, please, before starting the work on fastkafka, get familiar with it by reading [nbdev documentation](https://nbdev.fast.ai/getting_started.html).\n\nThe general philosophy you should follow when writing code for fastkafka is:\n\n- Function should be an atomic functionality, short and concise\n   - Good rule of thumb: your function should be 5-10 lines long usually\n- If there are more than 2 params, enforce keywording using *\n   - E.g.: `def function(param1, *, param2, param3): ...`\n- Define typing of arguments and return value\n   - If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected\n- After the function cell, write test cells using the assert keyword\n   - Whenever you implement something you should test that functionality immediately in the cells below \n- Add Google style python docstrings when function is implemented and tested\n\n### Before a PR\n\nAfter you have implemented your changes you will want to open a pull request to merge those changes into our main branch. To make this as painless for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):\n\n1. Format your notebooks: `nbqa black nbs`\n2. Close, shutdown, and clean the metadata from your notebooks: `nbdev_clean`\n3. Export your code: `nbdev_export`\n4. Run the tests: `nbdev_test`\n5. Test code typing: `mypy fastkafka`\n6. Test code safety with bandit: `bandit -r fastkafka`\n7. Test code safety with semgrep: `semgrep --config auto -r fastkafka`\n\nWhen you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. 
We will then review your changes and if everything is in order, we will approve your merge.\n\n## Attribution\nThis guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/LICENSE.md",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/EventMetadata.md",
    "content": "## `fastkafka.EventMetadata` {#fastkafka.EventMetadata}\n\n\nA class for encapsulating Kafka record metadata.\n\n**Parameters**:\n- `topic`: The topic this record is received from\n- `partition`: The partition from which this record is received\n- `offset`: The position of this record in the corresponding Kafka partition\n- `timestamp`: The timestamp of this record\n- `timestamp_type`: The timestamp type of this record\n- `key`: The key (or `None` if no key is specified)\n- `value`: The value\n- `serialized_key_size`: The size of the serialized, uncompressed key in bytes\n- `serialized_value_size`: The size of the serialized, uncompressed value in bytes\n- `headers`: The headers\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/FastKafka.md",
    "content": "## `fastkafka.FastKafka` {#fastkafka.FastKafka}\n\n### `__init__` {#init}\n\n`def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Dict[str, Any], root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None`\n\nCreates FastKafka application\n\n**Parameters**:\n- `title`: optional title for the documentation. If None,\nthe title will be set to empty string\n- `description`: optional description for the documentation. 
If\nNone, the description will be set to empty string\n- `version`: optional version for the documentation. If None,\nthe version will be set to empty string\n- `contact`: optional contact for the documentation. If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'\n- `kafka_brokers`: dictionary describing kafka brokers used for\ngenerating documentation\n- `root_path`: path to where documentation will be created\n- `lifespan`: asynccontextmanager that is used for setting lifespan hooks.\n__aenter__ is called before app start and __aexit__ after app stop.\nThe lifespan is called when the application is started as async context\nmanager, e.g.:`async with kafka_app...`\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). 
The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. 
Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. 
For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence requires ``acks`` to be set to ``all``. If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. 
In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]], typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, 
fastkafka.EventMetadata], NoneType]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specification and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates an instance of pydantic\nBaseModel. It also accepts a custom decoder function.\n- `executor`: Type of executor to choose for consuming tasks. Available options\nare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\n\"SequentialExecutor\" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n\"DynamicTaskExecutor\" which will wrap the consuming functions into tasks\nand run them on an asyncio loop in the background. This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `*topics`: optional list of topics to subscribe to. 
If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. 
NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. 
This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. 
The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. 
(See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., 
typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. 
If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. 
If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the backround.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/KafkaEvent.md",
    "content": "## `fastkafka.KafkaEvent` {#fastkafka.KafkaEvent}\n\n\nA generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel\n\n**Parameters**:\n- `message`: The message contained in the Kafka event, can be of type pydantic.BaseModel.\n- `key`: The optional key used to identify the Kafka event.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/encoder/AvroBase.md",
    "content": "## `fastkafka.encoder.AvroBase` {#fastkafka.encoder.AvroBase}\n\n\nThis is base pydantic class that will add some methods\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/encoder/avro_decoder.md",
    "content": "## `fastkafka.encoder.avro_decoder` {#fastkafka.encoder.avro_decoder}\n\n### `avro_decoder` {#avro_decoder}\n\n`def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any`\n\nDecoder to decode avro encoded messages to pydantic model instance\n\n**Parameters**:\n- `raw_msg`: Avro encoded bytes message received from Kafka topic\n- `cls`: Pydantic class; This pydantic class will be used to construct instance of same class\n\n**Returns**:\n- An instance of given pydantic class\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/encoder/avro_encoder.md",
    "content": "## `fastkafka.encoder.avro_encoder` {#fastkafka.encoder.avro_encoder}\n\n### `avro_encoder` {#avro_encoder}\n\n`def avro_encoder(msg: pydantic.main.BaseModel) -> bytes`\n\nEncoder to encode pydantic instances to avro message\n\n**Parameters**:\n- `msg`: An instance of pydantic basemodel\n\n**Returns**:\n- A bytes message which is encoded from pydantic basemodel\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/encoder/avsc_to_pydantic.md",
    "content": "## `fastkafka.encoder.avsc_to_pydantic` {#fastkafka.encoder.avsc_to_pydantic}\n\n### `avsc_to_pydantic` {#avsc_to_pydantic}\n\n`def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass`\n\nGenerate pydantic model from given Avro Schema\n\n**Parameters**:\n- `schema`: Avro schema in dictionary format\n\n**Returns**:\n- Pydantic model class built from given avro schema\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/encoder/json_decoder.md",
    "content": "## `fastkafka.encoder.json_decoder` {#fastkafka.encoder.json_decoder}\n\n### `json_decoder` {#json_decoder}\n\n`def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any`\n\nDecoder to decode json string in bytes to pydantic model instance\n\n**Parameters**:\n- `raw_msg`: Bytes message received from Kafka topic\n- `cls`: Pydantic class; This pydantic class will be used to construct instance of same class\n\n**Returns**:\n- An instance of given pydantic class\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/encoder/json_encoder.md",
    "content": "## `fastkafka.encoder.json_encoder` {#fastkafka.encoder.json_encoder}\n\n### `json_encoder` {#json_encoder}\n\n`def json_encoder(msg: pydantic.main.BaseModel) -> bytes`\n\nEncoder to encode pydantic instances to json string\n\n**Parameters**:\n- `msg`: An instance of pydantic basemodel\n\n**Returns**:\n- Json string in bytes which is encoded from pydantic basemodel\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/executors/DynamicTaskExecutor.md",
    "content": "## `fastkafka.executors.DynamicTaskExecutor` {#fastkafka.executors.DynamicTaskExecutor}\n\n\nA class that implements a dynamic task executor for processing consumer records.\n\nThe DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running a tasks in parallel using asyncio.Task.\n\n### `__init__` {#init}\n\n`def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None`\n\nCreate an instance of DynamicTaskExecutor\n\n**Parameters**:\n- `throw_exceptions`: Flag indicating whether exceptions should be thrown ot logged.\nDefaults to False.\n- `max_buffer_size`: Maximum buffer size for the memory object stream.\nDefaults to 100_000.\n- `size`: Size of the task pool. Defaults to 100_000.\n\n### `run` {#run}\n\n`def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None`\n\nRuns the dynamic task executor.\n\n**Parameters**:\n- `is_shutting_down_f`: Function to check if the executor is shutting down.\n- `generator`: Generator function for retrieving consumer records.\n- `processor`: Processor function for processing consumer records.\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/executors/SequentialExecutor.md",
    "content": "## `fastkafka.executors.SequentialExecutor` {#fastkafka.executors.SequentialExecutor}\n\n\nA class that implements a sequential executor for processing consumer records.\n\nThe SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines.\n\n### `__init__` {#init}\n\n`def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None`\n\nCreate an instance of SequentialExecutor\n\n**Parameters**:\n- `throw_exceptions`: Flag indicating whether exceptions should be thrown or logged.\nDefaults to False.\n- `max_buffer_size`: Maximum buffer size for the memory object stream.\nDefaults to 100_000.\n\n### `run` {#run}\n\n`def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None`\n\nRuns the sequential executor.\n\n**Parameters**:\n- `is_shutting_down_f`: Function to check if the executor is shutting down.\n- `generator`: Generator function for retrieving consumer records.\n- `processor`: Processor function for processing consumer records.\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/testing/ApacheKafkaBroker.md",
    "content": "## `fastkafka.testing.ApacheKafkaBroker` {#fastkafka.testing.ApacheKafkaBroker}\n\n\nApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nInitialises the ApacheKafkaBroker object\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokes) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.ApacheKafkaBroker) -> str`\n\nStarts a local kafka broker and zookeeper instance synchronously\n\n**Returns**:\n- Kafka broker bootstrap server address in string format: add:port\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None`\n\nStops a local kafka broker and zookeeper instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/testing/LocalRedpandaBroker.md",
    "content": "## `fastkafka.testing.LocalRedpandaBroker` {#fastkafka.testing.LocalRedpandaBroker}\n\n\nLocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None`\n\nInitialises the LocalRedpandaBroker object\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use byt Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log levels to use for Redpanda\n\n### `get_service_config_string` {#get_service_config_string}\n\n`def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str`\n\nGenerates a configuration for a service\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `service`: \"redpanda\", defines which service to get config string for\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.LocalRedpandaBroker) -> str`\n\nStarts a local redpanda broker instance synchronously\n\n**Returns**:\n- Redpanda broker bootstrap server address in string format: add:port\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None`\n\nStops a local redpanda broker instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/api/fastkafka/testing/Tester.md",
    "content": "## `fastkafka.testing.Tester` {#fastkafka.testing.Tester}\n\n### `__init__` {#init}\n\n`def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nMirror-like object for testing a FastKafka application\n\nCan be used as context manager\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokes) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]], typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, 
fastkafka.EventMetadata], NoneType]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function.\n- `executor`: Type of executor to choose for consuming tasks. Avaliable options\nare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\n\"SequentialExecutor\" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n\"DynamicTaskExecutor\" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `*topics`: optional list of topics to subscribe to. 
If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. 
NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. 
This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. 
The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no heartbeats are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. 
(See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., 
typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. 
If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. 
If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the backround.\n\n### `using_local_kafka` {#using_local_kafka}\n\n`def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester`\n\nStarts local Kafka broker used by the Tester instance\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokes) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n**Returns**:\n- An instance of tester with Kafka as broker\n\n### 
`using_local_redpanda` {#using_local_redpanda}\n\n`def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester`\n\nStarts local Redpanda broker used by the Tester instance\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use by Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log levels to use for Redpanda\n\n**Returns**:\n- An instance of tester with Redpanda as broker\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/cli/fastkafka.md",
    "content": "# `fastkafka`\n\n**Usage**:\n\n```console\n$ fastkafka [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `docs`: Commands for managing fastkafka app...\n* `run`: Runs Fast Kafka API application\n* `testing`: Commands for managing fastkafka testing\n\n## `fastkafka docs`\n\nCommands for managing fastkafka app documentation\n\n**Usage**:\n\n```console\n$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `generate`: Generates documentation for a FastKafka...\n* `install_deps`: Installs dependencies for FastKafka...\n* `serve`: Generates and serves documentation for a...\n\n### `fastkafka docs generate`\n\nGenerates documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs generate [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created  [default: .]\n* `--help`: Show this message and exit.\n\n### `fastkafka docs install_deps`\n\nInstalls dependencies for FastKafka documentation generation\n\n**Usage**:\n\n```console\n$ fastkafka docs install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n### `fastkafka docs serve`\n\nGenerates and serves documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs serve [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  
[required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created  [default: .]\n* `--bind TEXT`: Some info  [default: 127.0.0.1]\n* `--port INTEGER`: Some info  [default: 8000]\n* `--help`: Show this message and exit.\n\n## `fastkafka run`\n\nRuns Fast Kafka API application\n\n**Usage**:\n\n```console\n$ fastkafka run [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--num-workers INTEGER`: Number of FastKafka instances to run, defaults to number of CPU cores.  [default: 64]\n* `--kafka-broker TEXT`: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class.  [required]\n* `--help`: Show this message and exit.\n\n## `fastkafka testing`\n\nCommands for managing fastkafka testing\n\n**Usage**:\n\n```console\n$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `install_deps`: Installs dependencies for FastKafka app...\n\n### `fastkafka testing install_deps`\n\nInstalls dependencies for FastKafka app testing\n\n**Usage**:\n\n```console\n$ fastkafka testing install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/cli/run_fastkafka_server_process.md",
    "content": "# `run_fastkafka_server_process`\n\n**Usage**:\n\n```console\n$ run_fastkafka_server_process [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--kafka-broker TEXT`: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [required]\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_00_FastKafka_Demo.md",
    "content": "FastKafka tutorial\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\n``` python\ntry:\n    import fastkafka\nexcept:\n    ! pip install fastkafka\n```\n\n## Running in Colab\n\nYou can start this interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n</a>\n\n## Writing server code\n\nHere is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic.\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. 
The following call downloads\nthe dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/usage/types/#constrained-types),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. 
It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server.\n\nNext, an object of the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing the Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible strign values predicted by the mdoel. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstances which internally starts Kafka broker and zookeeper.\n\nBefore running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:\n\n``` sh\nfastkafka testing install_deps\n```\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send IrisInputData message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with IrisPrediction in predictions topic\n    await tester.awaited_mocks.on_predictions.assert_awaited_with(\n        IrisPrediction(species=\"setosa\"), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created a Iris classification model and encapulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purpuoses\n\n3.  Sent IrisInputData message to `input_data` topic\n\n4.  
Asserted and checked that the developed iris classification service\n    has reacted to IrisInputData message\n\n## Running the service\n\nThe service can be started using builtin `faskafka run` CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": 
{\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nTo run the service, you will need a running Kafka broker on localhost as\nspecified in the `kafka_brokers` parameter above. We can start the Kafka\nbroker locally using the\n[`ApacheKafkaBroker`](../api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker).\nNotice that the same happens automatically in the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\nas shown above.\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: 
Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n    '127.0.0.1:9092'\n\nThen, we start the FastKafka service by running the following command in\nthe folder where the `application.py` file is located:\n\n``` sh\nfastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n```\n\nIn the above command, we use `--num-workers` option to specify how many\nworkers to launch and we use `--kafka-broker` option to specify which\nkafka broker configuration to use from earlier specified `kafka_brokers`\n\n    [1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n    [1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n    [1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n    ^C\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n\nYou need to interupt running of the cell above by selecting\n`Runtime->Interupt execution` on the toolbar above.\n\nFinally, we can stop the local Kafka Broker:\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n  
  [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nWhen running in Colab, we need to update Node.js first:\n\nWe need to install all dependancies for the generator using the\nfollowing command line:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programatically you just need to call the\nfolloving command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n. This will generate the *asyncapi* folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\n    drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    ^C\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation bellow:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    bootstrap_servers=\"localhost:9092\",\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet are for the consumer as specified in\nthe code 
above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippet is for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_01_Intro.md",
    "content": "Intro\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nThis tutorial will show you how to use <b>FastKafkaAPI</b>, step by\nstep.\n\nThe goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel.\n\nIn this Intro tutorial we’ll go trough the basic requirements to run the\ndemos presented in future steps.\n\n## Installing FastKafkaAPI\n\nFirst step is to install FastKafkaAPI\n\n``` shell\n$ pip install fastkafka\n```\n\n## Preparing a Kafka broker\n\nNext step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication.\n\n!!! info \"Hey, your first info!\"\n\n    If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n\nTo go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times).\n\n!!! warning \"Listen! This is important.\"\n\n    To be able to setup this configuration you need to have Docker and docker-compose installed\n\n    See here for more info on <a href = \\\"https://docs.docker.com/\\\" target=\\\"_blank\\\">Docker</a> and <a href = \\\"https://docs.docker.com/compose/install/\\\" target=\\\"_blank\\\">docker compose</a>\n\nTo setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g. fastkafka_demo). 
Inside the new\nfolder create a new YAML file named <b>kafka_demo.yml</b> and copy the\nfollowing configuration into it:\n\n``` yaml\nversion: \"3\"\nservices:\n    zookeeper:\n        image: wurstmeister/zookeeper\n        hostname: zookeeper\n        container_name: zookeeper\n        networks:\n          - fastkafka-network\n        ports:\n          - \"2181:2181\"\n          - \"22:22\"\n          - \"2888:2888\"\n          - \"3888:3888\"\n    kafka:\n        image: wurstmeister/kafka\n        container_name: kafka\n        ports:\n          - \"9093:9093\"\n        environment:\n            HOSTNAME_COMMAND: \"docker info | grep ^Name: | cut -d' ' -f 2\"\n            KAFKA_ZOOKEEPER_CONNECT: \"zookeeper:2181\"\n            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n            KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n            KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n            KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n            KAFKA_CREATE_TOPICS: \"hello:1:1\"\n        volumes:\n            - /var/run/docker.sock:/var/run/docker.sock\n        depends_on:\n            - zookeeper\n        healthcheck:\n            test: [ \"CMD\", \"kafka-topics.sh\", \"--list\", \"--zookeeper\", \"zookeeper:2181\" ]\n            interval: 5s\n            timeout: 10s\n            retries: 5\n        networks:\n          - fastkafka-network\nnetworks:\n    fastkafka-network:\n        name: \"fastkafka-network\"\n```\n\nThis configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a ‘hello’ topic (quite enough for a\nstart). To start the configuration, run:\n\n``` shell\n$ docker-compose -f kafka_demo.yaml up -d --wait\n```\n\nThis will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
:confetti_ball:\n\n## Running the code\n\nAfter installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the ‘First Steps’ part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem.\n\nYou are highly encouraged to follow along the tutorials not just by\nreading through them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_02_First_Steps.md",
    "content": "First Steps\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Creating a simple Kafka consumer app\n\nFor our first demo we will create the simplest possible Kafka consumer\nand run it using ‘fastkafka run’ command.\n\nThe consumer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n\n2.  Listen to the hello topic\n\n3.  Write any message received from the hello topic to stdout\n\nTo create the consumer, first, create a file named\n<b>hello_kafka_consumer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n    \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n    print(f\"Got data, msg={msg.msg}\", flush=True)\n```\n\n!!! info \"Kafka configuration\"\n\n    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\n!!! warning \"Remember to flush\"\n\n    Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n\nTo run this consumer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n    [878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n\nNow you can interact with your consumer, by sending the messages to the\nsubscribed ‘hello’ topic, don’t worry, we will cover this in the next\nstep of this guide.\n\n## Sending first message to your consumer\n\nAfter we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly.\n\nIf you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic.\n\nFirst, connect to your running kafka broker by running:\n\n``` shell\ndocker run -it kafka /bin/bash\n```\n\nThen, when connected to the container, run:\n\n``` shell\nkafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n```\n\nThis will open an interactive connection to the hello topic, now you can\nwrite your mesages to the topic and they will be consumed by our\nconsumer.\n\nIn the shell, type:\n\n``` shell\n{\"msg\":\"hello\"}\n```\n\nand press enter. 
This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout.\n\nCheck the output of your consumer (terminal where you ran the ‘fastkafka\nrun’ command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:\n\n``` shell\nGot data, msg=hello\n```\n\n## Creating a hello Kafka producer\n\nConsuming messages is only a part of this Library functionality, the\nother big part is producing the messages. So, let’s create our first\nkafka producer which will send it’s greetings to our consumer\nperiodically.\n\nThe producer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n2.  Connect to the hello topic\n3.  Periodically send a message to the hello world topic\n\nTo create the producer, first, create a file named\n<b>hello_kafka_producer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n    logger.info(f\"Producing: {msg}\")\n    return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello(HelloKafkaMsg(msg=\"hello\"))\n        await asyncio.sleep(1)\n```\n\n!!! 
info \"Kafka configuration\"\n\n    This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\nTo run this producer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n\nNow, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. If your consumer is still running, you\nshould see the messages appear in its log.\n\n## Recap\n\nIn this guide we have:\n\n1.  Created a simple Kafka consumer using FastKafka\n2.  Sent a message to our consumer trough Kafka\n3.  Created a simple Kafka producer using FastKafka\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_03_Authentication.md",
    "content": "Authentication\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## TLS Authentication\n\nsasl_mechanism (str) – Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN\n\nsasl_plain_username (str) – username for SASL PLAIN authentication.\nDefault: None\n\nsasl_plain_password (str) – password for SASL PLAIN authentication.\nDefault: None\n\nsasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_04_Github_Actions_Workflow.md",
    "content": "Deploy FastKafka docs to GitHub Pages\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Getting started\n\nAdd your workflow file `.github/workflows/fastkafka_docs_deploy.yml` and\npush it to your remote default branch.\n\nHere is an example workflow:\n\n``` yaml\nname: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n  push:\n    branches: [ \"main\", \"master\" ]\n  workflow_dispatch:\n\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n      - uses: airtai/workflows/fastkafka-ghp@main\n        with:\n          app: \"test_fastkafka.application:kafka_app\"\n```\n\n## Options\n\n### Set app location\n\nInput in the form of `path:app`, where `path` is the path to a Python\nfile and `app` is an object of type\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka):\n\n``` yaml\n- name: Deploy\n  uses: airtai/workflows/fastkafka-ghp@main\n  with:\n    app: \"test_fastkafka.application:kafka_app\"\n```\n\nIn the above example,\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp is named as `kafka_app` and it is available in the `application`\nsubmodule of the `test_fastkafka` module.\n\n## Example Repository\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\nlibrary that uses the above-mentioned workflow actions to publish\nFastKafka docs to `Github Pages` can be found\n[here](https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_05_Lifespan_Handler.md",
    "content": "Lifespan Events\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nDid you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! :rocket:\n\nLets break it down:\n\nYou can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages.\n\nSimilarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up.\n\nBy executing code before consuming and after producing, you cover the\nentire lifecycle of your application :tada:\n\nThis is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!\n\nSo lets give it a try and see how it can make your Kafka app even more\nawesome! :muscle:\n\n## Lifespan example - Iris prediction model\n\nLet’s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don’t want to load them for every\nmessage.\n\nHere’s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we’ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication.\n\n### Lifespan\n\nYou can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager.\n\nLet’s start with an example and then see it in detail.\n\nWe create an async function lifespan() with yield like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \n```\n\nThe first thing to notice, is that we are defining an async function\nwith `yield`. This is very similar to Dependencies with `yield`.\n\nThe first part of the function, before the `yield`, will be executed\n**before** the application starts. 
And the part after the `yield` will\nbe executed **after** the application has finished.\n\nThis lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown.\n\nThe lifespan will be passed a KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup.\n\nFor demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called.\n\n### Async context manager\n\nContext managers can be used in `with` blocks, our lifespan, for example\ncould be used like this:\n\n``` python\nml_models = {}\nasync with lifespan(None):\n    print(ml_models)\n```\n\nWhen you create a context manager or an async context manager, what it\ndoes is that, before entering the `with` block, it will execute the code\nbefore the `yield`, and after exiting the `with` block, it will execute\nthe code after the `yield`.\n\nIf you want to learn more about context managers and contextlib\ndecorators, please visit [Python official\ndocs](https://docs.python.org/3/library/contextlib.html)\n\n## App demo\n\n### FastKafka app\n\nLet’s now create our application using the created lifespan handler.\n\nNotice how we passed our lifespan handler to the app when constructing\nit through the `lifespan` argument.\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n```\n\n### Data modeling\n\nLet’s model the Iris data for our app:\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, 
description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Consumers and producers\n\nLets create a consumer and producer for our app that will generate\npredictions from input iris data.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Final app\n\nThe final app looks like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal 
width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Running the app\n\nNow we can run the app with your custom lifespan handler. 
Copy the code\nabove in lifespan_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n```\n\nWhen you run the app, you should see a similar output to the one below:\n\n    [262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [262292]: Loading the model!\n    [262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from 
{} to {'input_data': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n    [262292]: Exiting, clearing model dict!\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n\n## Recap\n\nIn this guide we have defined a lifespan handler and passed it to our\nFastKafka app.\n\nSome important points are:\n\n1.  Lifespan handler is implemented as\n    [AsyncContextManager](https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager)\n2.  Code **before** yield in lifespan will be executed **before**\n    application **startup**\n3.  Code **after** yield in lifespan will be executed **after**\n    application **shutdown**\n4.  You can pass your lifespan handler to FastKafka app on\n    initialisation by passing a `lifespan` argument\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_06_Benchmarking_FastKafka.md",
    "content": "Benchmarking FastKafka app\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\nTo benchmark a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nproject, you will need the following:\n\n1.  A library built with\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka).\n2.  A running `Kafka` instance to benchmark the FastKafka application\n    against.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": 
{\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nhas a decorator for benchmarking which is appropriately called as\n`benchmark`. 
Let’s edit our `application.py` file and add the\n`benchmark` decorator to the consumes method.\n\n``` python\n# content of the \"application.py\" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", 
auto_offset_reset=\"latest\")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nHere we are conducting a benchmark of a function that consumes data from\nthe `input_data` topic with an interval of 1 second and a sliding window\nsize of 5.\n\nThis `benchmark` method uses the `interval` parameter to calculate the\nresults over a specific time period, and the `sliding_window_size`\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation.\n\nThis benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement.\n\n### Starting Kafka\n\nIf you already have a `Kafka` running somewhere, then you can skip this\nstep.\n\nPlease keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself.\n\n#### Installing Java and Kafka\n\nWe need a working `Kafka`instance to benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp, and to run `Kafka` we need `Java`. 
Thankfully,\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\ncomes with a CLI to install both `Java` and `Kafka` on our machine.\n\nSo, let’s install `Java` and `Kafka` by executing the following command.\n\n``` cmd\nfastkafka testing install_deps\n```\n\nThe above command will extract `Kafka` scripts at the location\n“\\$HOME/.local/kafka_2.13-3.3.2\" on your machine.\n\n#### Creating configuration for Zookeeper and Kafka\n\nNow we need to start `Zookeeper` and `Kafka` separately, and to start\nthem we need `zookeeper.properties` and `kafka.properties` files.\n\nLet’s create a folder inside the folder where `Kafka` scripts were\nextracted and change directory into it.\n\n``` cmd\nmkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n```\n\nLet’s create a file called `zookeeper.properties` and write the\nfollowing content to the file:\n\n``` txt\ndataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n```\n\nSimilarly, let’s create a file called `kafka.properties` and write the\nfollowing content to the file:\n\n``` txt\nbroker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n```\n\n#### Starting Zookeeper and Kafka\n\nWe need two different terminals to run `Zookeeper` in one and `Kafka` in\nanother. 
Let’s open a new terminal and run the following commands to\nstart `Zookeeper`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n```\n\nOnce `Zookeeper` is up and running, open a new terminal and execute the\nfollowing commands to start `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n```\n\nNow we have both `Zookeeper` and `Kafka` up and running.\n\n#### Creating topics in Kafka\n\nIn a new terminal, please execute the following command to create\nnecessary topics in `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n```\n\n#### Populating topics with dummy data\n\nTo benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp, we need some data in `Kafka` topics.\n\nIn the same terminal, let’s create some dummy data:\n\n``` cmd\nyes '{\"sepal_length\": 0.7739560486, \"sepal_width\": 0.8636615789, \"petal_length\": 0.6122663046, \"petal_width\": 0.1338914722}' | head -n 1000000 > /tmp/test_data\n```\n\nThis command will create a file called `test_data` in the `tmp` folder\nwith one million rows of text. This will act as dummy data to populate\nthe `input_data` topic.\n\nLet’s populate the created topic `input_data` with the dummy data which\nwe created above:\n\n``` cmd\n./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n```\n\nNow our topic `input_data` has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again.\n\n### Benchmarking FastKafka\n\nOnce `Zookeeper` and `Kafka` are ready, benchmarking\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp is as simple as running the `fastkafka run` command:\n\n``` cmd\nfastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n```\n\nThis command will start the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp and begin consuming messages from `Kafka`, which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the `interval` and `sliding_window_size` values.\n\nThe output for the `fastkafka run` command is:\n\n``` txt\n[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 
23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n```\n\nBased on the output, when using 1 worker, our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp achieved a `throughput` of 93k messages per second and an\n`average throughput` of 93k messages per second.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",
    "content": "Encoding and Decoding Kafka Messages with FastKafka\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\n1.  A basic knowledge of\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    is needed to proceed with this guide. If you are not familiar with\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka),\n    please go through the [tutorial](/docs#tutorial) first.\n2.  [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    with its dependencies installed is needed. Please install\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    using the command - `pip install fastkafka`\n\n## Ways to Encode and Decode Messages with FastKafka\n\nIn python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. imilarly, while consuming messages, we consume them as bytes and\nthen convert them to strings.\n\nIn FastKafka, we specify message schema using Pydantic models as\nmentioned in [tutorial](/docs#messages):\n\n``` python\n# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nThen, we send and receive messages as instances of Pydantic models which\nwe defined. 
So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics.\n\nThe `@consumes` and `@produces` methods of FastKafka accept a parameter\ncalled `decoder`/`encoder` to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:\n\n1.  json - This is the default encoder/decoder option in FastKafka.\n    While producing, this option converts our instance of Pydantic model\n    messages to a JSON string and then converts it to bytes before\n    sending it to the topic. While consuming, it converts bytes to a\n    JSON string and then constructs an instance of Pydantic model from\n    the JSON string.\n2.  avro - This option uses Avro encoding/decoding to convert instances\n    of Pydantic model messages to bytes while producing, and while\n    consuming, it constructs an instance of Pydantic model from bytes.\n3.  custom encoder/decoder - If you are not happy with the json or avro\n    encoder/decoder options, you can write your own encoder/decoder\n    functions and use them to encode/decode Pydantic messages.\n\n## 1. Json encoder and decoder\n\nThe default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string.\n\nWe can use the application from [tutorial](/docs#running-the-service) as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder\nparameter:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"json\")\nasync def 
on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"json\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above code, the `@kafka_app.consumes` decorator sets up a\nconsumer for the “input_data\" topic, using the ‘json’ decoder to convert\nthe message payload to an instance of `IrisInputData`. The\n`@kafka_app.produces` decorator sets up a producer for the “predictions\"\ntopic, using the ‘json’ encoder to convert the instance of\n`IrisPrediction` to message payload.\n\n## 2. Avro encoder and decoder\n\n### What is Avro?\n\nAvro is a row-oriented remote procedure call and data serialization\nframework developed within Apache’s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n[docs](https://avro.apache.org/docs/).\n\n### Installing FastKafka with Avro dependencies\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nwith Avro support using the command - `pip install fastkafka[avro]`\n\n### Defining Avro Schema Using Pydantic Models\n\nBy default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models.\n\nSo, similar to the [tutorial](/docs#tutorial), the message schema will\nremain as it is.\n\n``` python\n# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nNo need to change anything to support avro. 
You can use existing\nPydantic models as is.\n\n### Reusing existing avro schema\n\nIf you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined.\n\n#### Building pydantic models from avro schema dictionary\n\nLet’s modify the above example and let’s assume we have schemas already\nfor `IrisInputData` and `IrisPrediction` which will look like below:\n\n``` python\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": \"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n```\n\nWe can easily construct pydantic models from avro schema using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction which is included as part of\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nitself.\n\n``` python\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\nThe above code will convert avro schema to pydantic models and will\nprint pydantic models’ fields. 
The output of the above is:\n\n``` txt\n{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n```\n\nThis is exactly same as manually defining the pydantic models ourselves.\nYou don’t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction as demonstrated above.\n\n#### Building pydantic models from `.avsc` file\n\nNot all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary `.avsc` files in\nfilesystem. Let’s see how to convert those `.avsc` files to pydantic\nmodels.\n\nLet’s assume our avro files are stored in files called\n`iris_input_data_schema.avsc` and `iris_prediction_schema.avsc`. In that\ncase, following code converts the schema to pydantic models:\n\n``` python\nimport json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open(\"iris_input_data_schema.avsc\", \"rb\") as f:\n    iris_input_data_schema = json.load(f)\n    \nwith open(\"iris_prediction_schema.avsc\", \"rb\") as f:\n    iris_prediction_schema = json.load(f)\n    \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\n### Consume/Produce avro messages with FastKafka\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nprovides `@consumes` and `@produces` methods to consume/produces\nmessages to/from a `Kafka` topic. 
This is explained in\n[tutorial](/docs#function-decorators).\n\nThe `@consumes` and `@produces` methods accept a parameter called\n`decoder`/`encoder` to decode/encode avro messages.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above example, in `@consumes` and `@produces` methods, we\nexplicitly instruct FastKafka to `decode` and `encode` messages using\nthe `avro` `decoder`/`encoder` instead of the default `json`\n`decoder`/`encoder`.\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use `avro` to `decode` and\n`encode` messages:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": 
\"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThe above code is a sample implementation of using FastKafka to consume\nand produce 
Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages.\n\nThe\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is then instantiated with the broker details, and two functions\ndecorated with `@kafka_app.consumes` and `@kafka_app.produces` are\ndefined to consume messages from the “input_data\" topic and produce\nmessages to the “predictions\" topic, respectively. The functions uses\nthe decoder=“avro\" and encoder=“avro\" parameters to decode and encode\nthe Avro messages.\n\nIn summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline.\n\n## 3. Custom encoder and decoder\n\nIf you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages.\n\n### Writing a custom encoder and decoder\n\nIn this section, let’s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n[ROT13](https://en.wikipedia.org/wiki/ROT13) cipher.\n\n``` python\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n```\n\nThe above code defines two custom functions for 
encoding and decoding\nmessages in a Kafka application using the FastKafka library.\n\nThe encoding function, `custom_encoder()`, takes a message `msg` which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe `json()` method, obfuscates the resulting string using the ROT13\nalgorithm from the `codecs` module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding.\n\nThe decoding function, `custom_decoder()`, takes a raw message `raw_msg`\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the `json.loads()` method and returns a\nnew instance of the specified `cls` class initialized with the decoded\ndictionary.\n\nThese functions can be used with FastKafka’s `encoder` and `decoder`\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics.\n\nLet’s test the above code\n\n``` python\ni = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n```\n\nThis will result in following output\n\n``` txt\nb'{\"frcny_yratgu\": 0.5, \"frcny_jvqgu\": 0.5, \"crgny_yratgu\": 0.5, \"crgny_jvqgu\": 0.5}'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n```\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use our custom decoder and\nencoder functions:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: 
FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\n\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    
kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThis code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system.\n\nThe custom `encoder` function takes a message represented as a\n`BaseModel` and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned.\n\nThe custom `decoder` function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the `json`\nmodule. This dictionary is then converted to a `BaseModel` instance\nusing the cls parameter.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_11_Consumes_Basics.md",
    "content": "@consumes basics\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@consumes` decorator to consume messages from Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will consume\n`HelloWorld` messages from hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n\nTo use the `@consumes` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\nIn this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger.\n\n``` python\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to\nconsume from the topic using [pydantic](https://docs.pydantic.dev/). 
For\nthe guide we’ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a consumer function and decorate it with `@consumes`\n\nLet’s create a consumer function that will consume `HelloWorld` messages\nfrom *hello_world* topic and log them.\n\n``` python\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nThe function decorated with the `@consumes` decorator will be called\nwhen a message is produced to Kafka.\n\nThe message will then be injected into the typed *msg* argument of the\nfunction and its type will be used to parse the message.\n\nIn this example case, when the message is sent into a *hello_world*\ntopic, it will be parsed into a HelloWorld class and `on_hello_world`\nfunction will be called with the parsed class as *msg* argument value.\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom 
fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\n## Run the app\n\nNow we can run the app. Copy the code above in consumer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.\n\n## Send the message to kafka topic\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\n``` python\nprint(consumer_task.value[1].decode(\"UTF-8\"))\n```\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are\nreceiving the message from, this is because the `@consumes` decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default “on\\_\" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n*hello_world*.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nconsumes decorator, like this:\n\nAlso, you can define the topic name completely by defining the `topic`\nparameter in the consumes decorator, like this:\n\n## Message data\n\nThe message received from kafka is translated from binary JSON\nrepresentation into the class defined by typing of *msg* parameter in the\nfunction decorated by the `@consumes` decorator.\n\nIn this example case, the message will be parsed into a `HelloWorld`\nclass.\n\n## Message metadata\n\nIf you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding an EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction.\n\nLet’s demonstrate that.\n\n### Create a consumer function with metadata\n\nThe only difference from the original basic consume function is that we\nare now passing the `meta: EventMetadata` argument to the function. The\n`@consumes` decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. Let’s log it to see what it\ncontains.\n\nFirst, we need to import the EventMetadata\n\nNow we can add the `meta` argument to our consuming function.\n\nYour final app should look like this:\n\nNow let’s run the app and send a message to the broker to see the logged\nmessage metadata.\n\nYou should see a similar log as the one below and the metadata being\nlogged in your app.\n\nAs you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n:tada:\n\n## Dealing with high latency consuming functions\n\nIf your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. 
This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consuming events. By default, the consume function will\nrun the consuming functions for one topic sequentially, this is the most\nstraightforward approach and results in the least amount of overhead.\n\nBut, to handle those high latency tasks and run them in parallel,\nFastKafka has a\n[`DynamicTaskExecutor`](../api/fastkafka/executors/DynamicTaskExecutor.md/#fastkafka.executors.DynamicTaskExecutor)\nprepared for your consumers. This executor comes with additional\noverhead, so use it only when you need to handle high latency functions.\n\nLet’s demonstrate how to use it.\n\n``` python\ndecorate_consumes_executor = \"\"\"@app.consumes(executor=\"DynamicTaskExecutor\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n\"\"\"\nmd(f\"```python\\n{decorate_consumes_executor}\\n```\")\n```\n\n``` python\n@app.consumes(executor=\"DynamicTaskExecutor\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nLet’s send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. 
In your\nterminal, run:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\n``` python\nprint(consumer_task.value[1].decode(\"UTF-8\"))\n```\n\n    [6814]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:50361'\n    [6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:50361', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [6814]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [6814]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [6814]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\n    [6814]: ConsumerRecord(topic='hello_world', partition=0, offset=0, timestamp=1683803949271, timestamp_type=0, key=None, value=b'{\"msg\": \"Hello world\"}', checksum=None, serialized_key_size=-1, serialized_value_size=22, headers=())\n    [6814]: [INFO] consumer_example: Got msg: msg='Hello world'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 6814...\n    [6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 6814 terminated.\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_21_Produces_Basics.md",
    "content": "@produces basics\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@produces` decorator to produce messages to Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n\nTo use the `@produces` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to send\nto the topic using [pydantic](https://docs.pydantic.dev/). For the guide\nwe’ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a producer function and decorate it with `@produces`\n\nLet’s create a producer function that will produce `HelloWorld` messages\nto *hello_world* topic:\n\n``` 
python\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nNow you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic.\n\nBy default, the topic is determined from your function name, the “to\\_\"\nprefix is stripped and what is left over is used as a topic name. In this\ncase, that is *hello_world*.\n\n## Instruct the app to start sending HelloWorld messages\n\nLet’s use `@run_in_background` decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second.\n\n``` python\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\n``` python\nscript_file = \"producer_example.py\"\ncmd = \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\"\nmd(\n    f\"Now we can run the app. 
Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```\"\n)\n```\n\nNow we can run the app. Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n\n## Check if the message was sent to the Kafka topic\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the {“msg\": “Hello world!\"} messages in your topic.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are sending\nthe message to, this is because the `@produces` decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default “to\\_\" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n*hello_world*.\n\n!!! warn \"New topics\"\n\n    Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nproduces decorator, like this:\n\n``` python\n\n@app.produces(prefix=\"send_to_\")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nAlso, you can define the topic name completely by defining the `topic`\nparameter in the produces decorator, like this:\n\n``` python\n\n@app.produces(topic=\"my_special_topic\")\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\n## Message data\n\nThe return value from your function will be translated to a JSON string\nand then to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app.\n\nIn this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. In the form of:\n`b'{\"msg\": \"Hello world!\"}'`\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_22_Partition_Keys.md",
    "content": "Defining a partition key\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nPartition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance.\n\nYou can define your partition keys when using the `@produces` decorator,\nthis guide will demonstrate to you this feature.\n\n## Return a key from the producing function\n\nTo define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass and set the key value. Check the example below:\n\n``` python\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n```\n\nIn the example, we want to return the `HelloWorld` message class with\nthe key defined as *my_key*. So, we wrap the message and key into a\nKafkaEvent class and return it as such.\n\nWhile generating the documentation, the\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass will be unwrapped and the `HelloWorld` class will be documented in\nthe definition of message type, same way if you didn’t use the key.\n\n!!! info \"Which key to choose?\"\n\n    Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. 
Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n\n## App example\n\nWe will modify the app example from **@producer basics** guide to return\nthe `HelloWorld` with our key. The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n\n## Check if the message was sent to the Kafka topic with the desired key\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic with the defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the *my_key {“msg\": “Hello world!\"}* messages in your\ntopic appearing, the *my_key* part of the message is the key that we\ndefined in our producing function.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_23_Batch_Producing.md",
    "content": "Batch producing\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nIf you want to send your data in batches `@produces` decorator makes\nthat possible for you. By returning a `list` of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker.\n\nThis guide will demonstrate how to use this feature.\n\n## Return a batch from the producing function\n\nTo define a batch that you want to produce to Kafka topic, you need to\nreturn the `List` of the messages that you want to be batched from your\nproducing function.\n\n``` python\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\nIn the example, we want to return the `HelloWorld` message class batch\nthat is created from a list of msgs we passed into our producing\nfunction.\n\nLets also prepare a background task that will send a batch of “hello\nworld\" messages when the app starts.\n\n``` python\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n```\n\n## App example\n\nWe will modify the app example from [@producer\nbasics](/docs/guides/Guide_21_Produces_Basics.md) guide to return the\n`HelloWorld` batch. 
The final app will look like this (make sure you\nreplace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n    [46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n\n## Check if the batch was sent to the Kafka topic with the defined key\n\nLets check the topic and see if there are “Hello world\" messages in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages in your topic.\n\n## Batch key\n\nTo define a key for your batch like in [Defining a partition\nkey](/docs/guides/Guide_22_Partition_Keys.md) guide you can wrap the\nreturning value in a\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass. To learn more about defining a partition key and\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass, please, have a look at [Defining a partition\nkey](/docs/guides/Guide_22_Partition_Keys.md) guide.\n\nLet’s demonstrate that.\n\nTo define a key, we just need to modify our producing function, like\nthis:\n\n``` python\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\"my_key\")\n```\n\nNow our app looks like this:\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], 
key=b\"my_key\")\n```\n\n## Check if the batch was sent to the Kafka topic\n\nLets check the topic and see if there are “Hello world\" messages in the\nhello_world topic, containing a defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages with the defined key in your topic.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",
    "content": "Deploying FastKafka using Docker\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Building a Docker Image\n\nTo build a Docker image for a FastKafka project, we need the following\nitems:\n\n1.  A library that is built using FastKafka.\n2.  A file in which the requirements are specified. This could be a\n    requirements.txt file, a setup.py file, or even a wheel file.\n3.  A Dockerfile to build an image that will include the two files\n    mentioned above.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom 
fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Creating requirements.txt file\n\nThe above code only requires `fastkafka`. So, we will add only\n`fastkafka` to the `requirements.txt` file, but you can add additional\nrequirements to it as well.\n\n``` txt\nfastkafka>=0.3.0\n```\n\nHere we are using `requirements.txt` to store the project’s\ndependencies. However, other methods like `setup.py`, `pipenv`, and\n`wheel` files can also be used. `setup.py` is commonly used for\npackaging and distributing Python modules, while `pipenv` is a tool used\nfor managing virtual environments and package dependencies. 
`wheel`\nfiles are built distributions of Python packages that can be installed\nwith pip.\n\n### Creating Dockerfile\n\n``` dockerfile\n# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD [\"fastkafka\", \"run\", \"--num-workers\", \"2\", \"--kafka-broker\", \"production\", \"application:kafka_app\"]\n```\n\n1.  Start from the official Python base image.\n\n2.  Set the current working directory to `/project`.\n\n    This is where we’ll put the `requirements.txt` file and the\n    `application.py` file.\n\n3.  Copy the `application.py` file and `requirements.txt` file inside\n    the `/project` directory.\n\n4.  Install the package dependencies in the requirements file.\n\n    The `--no-cache-dir` option tells `pip` to not save the downloaded\n    packages locally, as that is only if `pip` was going to be run again\n    to install the same packages, but that’s not the case when working\n    with containers.\n\n    The `--upgrade` option tells `pip` to upgrade the packages if they\n    are already installed.\n\n5.  Set the **command** to run the `fastkafka run` command.\n\n    `CMD` takes a list of strings, each of these strings is what you\n    would type in the command line separated by spaces.\n\n    This command will be run from the **current working directory**, the\n    same `/project` directory you set above with `WORKDIR /project`.\n\n    We supply additional parameters `--num-workers` and `--kafka-broker`\n    for the run command. Finally, we specify the location of our\n    `fastkafka` application location as a command argument.\n\n    To learn more about `fastkafka run` command please check the [CLI\n    docs](../../cli/fastkafka/#fastkafka-run).\n\n### Build the Docker Image\n\nNow that all the files are in place, let’s build the container image.\n\n1.  
Go to the project directory (where your `Dockerfile` is, containing\n    your `application.py` file).\n\n2.  Run the following command to build the image:\n\n    ``` cmd\n    docker build -t fastkafka_project_image .\n    ```\n\n    This command will create a docker image with the name\n    `fastkafka_project_image` and the `latest` tag.\n\nThat’s it! You have now built a docker image for your FastKafka project.\n\n### Start the Docker Container\n\nRun a container based on the built image:\n\n``` cmd\ndocker run -d --name fastkafka_project_container fastkafka_project_image\n```\n\n## Additional Security\n\n`Trivy` is an open-source tool that scans Docker images for\nvulnerabilities. It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here’s\nhow you can use `trivy` to scan your `fastkafka_project_image`:\n\n1.  Install `trivy` on your local machine by following the instructions\n    provided in the [official `trivy`\n    documentation](https://aquasecurity.github.io/trivy/latest/getting-started/installation/).\n\n2.  Run the following command to scan your fastkafka_project_image:\n\n    ``` cmd\n    trivy image fastkafka_project_image\n    ```\n\n    This command will scan your `fastkafka_project_image` for any\n    vulnerabilities and provide you with a report of its findings.\n\n3.  Fix any vulnerabilities identified by `trivy`. You can do this by\n    updating the vulnerable package to a more secure version or by using\n    a different package altogether.\n\n4.  
Rebuild your `fastkafka_project_image` and repeat steps 2 and 3\n    until `trivy` reports no vulnerabilities.\n\nBy using `trivy` to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities.\n\n## Example repo\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n[here](https://github.com/airtai/sample_fastkafka_project/)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",
    "content": "Using Redpanda to test FastKafka\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## What is FastKafka?\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## What is Redpanda?\n\nRedpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda.\n\nFrom [redpanda.com](https://redpanda.com/):\n\n> Redpanda is a Kafka®-compatible streaming data platform that is proven\n> to be 10x faster and 6x lower in total costs. It is also JVM-free,\n> ZooKeeper®-free, Jepsen-tested and source available.\n\nSome of the advantages of Redpanda over Kafka are\n\n1.  A single binary with built-in everything, no ZooKeeper® or JVM\n    needed.\n2.  Costs upto 6X less than Kafka.\n3.  
Up to 10x lower average latencies and up to 6x faster Kafka\n    transactions without compromising correctness.\n\nTo learn more about Redpanda, please visit their\n[website](https://redpanda.com/) or checkout this [blog\npost](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\ncomparing Redpanda and Kafka’s performance benchmarks.\n\n## Example repo\n\nA sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n[here](https://github.com/airtai/sample_fastkafka_with_redpanda).\n\n## The process\n\nHere are the steps we’ll be walking through to build our example:\n\n1.  Set up the prerequisites.\n2.  Clone the example repo.\n3.  Explain how to write an application using FastKafka.\n4.  Explain how to write a test case to test FastKafka with Redpanda.\n5.  Run the test case and produce/consume messages.\n\n## 1. Prerequisites\n\nBefore starting, make sure you have the following prerequisites set up:\n\n1.  **Python 3.x**: A Python 3.x installation is required to run\n    FastKafka. You can download the latest version of Python from the\n    [official website](https://www.python.org/downloads/). You’ll also\n    need to have pip installed and updated, which is Python’s package\n    installer.\n2.  **Docker Desktop**: Docker is used to run Redpanda, which is\n    required for testing FastKafka. You can download and install Docker\n    Desktop from the [official\n    website](https://www.docker.com/products/docker-desktop/).\n3.  **Git**: You’ll need to have Git installed to clone the example\n    repo. You can download Git from the [official\n    website](https://git-scm.com/downloads).\n\n## 2. 
Cloning and setting up the example repo\n\nTo get started with the example code, clone the [GitHub\nrepository](https://github.com/airtai/sample_fastkafka_with_redpanda) by\nrunning the following command in your terminal:\n\n``` cmd\ngit clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n```\n\nThis will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files.\n\n### Create a virtual environment\n\nBefore writing any code, let’s [create a new virtual\nenvironment](https://docs.python.org/3/library/venv.html#module-venv)\nfor our project.\n\nA virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between different projects.\n\nTo create a new virtual environment, run the following commands in your\nterminal:\n\n``` cmd\npython3 -m venv venv\n```\n\nThis will create a new directory called `venv` in your project\ndirectory, which will contain the virtual environment.\n\nTo activate the virtual environment, run the following command:\n\n``` cmd\nsource venv/bin/activate\n```\n\nThis will change your shell’s prompt to indicate that you are now\nworking inside the virtual environment.\n\nFinally, run the following command to upgrade `pip`, the Python package\ninstaller:\n\n``` cmd\npip install --upgrade pip\n```\n\n### Install Python dependencies\n\nNext, let’s install the required Python dependencies. In this guide,\nwe’ll be using\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nto write our application code and `pytest` and `pytest-asyncio` to test\nit.\n\nYou can install the dependencies from the `requirements.txt` file\nprovided in the cloned repository by running:\n\n``` cmd\npip install -r requirements.txt\n```\n\nThis will install all the required packages and their dependencies.\n\n## 3. 
Writing server code\n\nThe `application.py` file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. 
It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker.\n\nNext, an instance of the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum required arguments:\n\n- `kafka_brokers`: a dictionary used for generating documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument instructs Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## 4. Writing the test code\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstance which can be configured to start a [Redpanda\nbroker](../../api/fastkafka/testing/LocalRedpandaBroker/) for testing\npurposes. 
The `test.py` file in the cloned repository contains the\nfollowing code for testing.\n\n``` python\nimport pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n    # Start Tester app and create local Redpanda broker for testing\n    async with Tester(kafka_app).using_local_redpanda(\n        tag=\"v23.1.2\", listener_port=9092\n    ) as tester:\n        # Send IrisInputData message to input_data topic\n        await tester.to_input_data(msg)\n\n        # Assert that the kafka_app responded with IrisPrediction in predictions topic\n        await tester.awaited_mocks.on_predictions.assert_awaited_with(\n            IrisPrediction(species=\"setosa\"), timeout=2\n        )\n```\n\nThe\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\nmodule utilizes uses\n[`LocalRedpandaBroker`](../api/fastkafka/testing/LocalRedpandaBroker.md/#fastkafka.testing.LocalRedpandaBroker)\nto start and stop a Redpanda broker for testing purposes using Docker\n\n## 5. Running the tests\n\nWe can run the tests which is in `test.py` file by executing the\nfollowing command:\n\n``` cmd\npytest test.py\n```\n\nThis will start a Redpanda broker using Docker and executes tests. The\noutput of the command is:\n\n``` cmd\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item                                                                 \n\ntest.py .                                                                  
[100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n```\n\nRunning the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable.\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napplication. The app will consume the `IrisInputData` from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our\n    [`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\n    class with `Redpanda` broker which mirrors the developed app topics\n    for testing purposes\n\n3.  Sent `IrisInputData` message to `input_data` topic\n\n4.  Asserted and checked that the developed iris classification service\n    has reacted to `IrisInputData` message\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/index.md",
    "content": "FastKafka\n================\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n<b>Effortless Kafka integration for your web services</b>\n\n------------------------------------------------------------------------\n\n![PyPI](https://img.shields.io/pypi/v/fastkafka.png) ![PyPI -\nDownloads](https://img.shields.io/pypi/dm/fastkafka.png) ![PyPI - Python\nVersion](https://img.shields.io/pypi/pyversions/fastkafka.png)\n\n![GitHub Workflow\nStatus](https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml)\n![CodeQL](https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg)\n![Dependency\nReview](https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg)\n\n![GitHub](https://img.shields.io/github/license/airtai/fastkafka.png)\n\n------------------------------------------------------------------------\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n------------------------------------------------------------------------\n\n#### ⭐⭐⭐ Stay in touch ⭐⭐⭐\n\nPlease show your support and stay in touch by:\n\n- giving our [GitHub repository](https://github.com/airtai/fastkafka/) a\n  star, and\n\n- joining our [Discord server](https://discord.gg/CJWmYpyFbc).\n\nYour support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. Thank you for your\nsupport!\n\n------------------------------------------------------------------------\n\n#### 🐝🐝🐝 We were busy lately 🐝🐝🐝\n\n![Activity](https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg \"Repobeats analytics image\")\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of `fastkafka` with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\nTo install fastkafka with testing features please use:\n\n``` sh\npip install fastkafka[test]\n```\n\nTo install fastkafka with asyncapi docs please use:\n\n``` sh\npip install fastkafka[docs]\n```\n\nTo install fastkafka with all the features please use:\n\n``` sh\npip install fastkafka[test,docs]\n```\n\n## Tutorial\n\nYou can start an interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" 
target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n</a>\n\n## Writing server code\n\nHere is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic.\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. 
It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker.\n\nNext, an object of the\n[`FastKafka`](./api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument instructs Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](./api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstances which internally starts InMemory implementation of Kafka\nbroker.\n\nThe Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies.\n\n``` python\nfrom fastkafka.testing import 
Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send IrisInputData message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with IrisPrediction in predictions topic\n    await tester.awaited_mocks.on_predictions.assert_awaited_with(\n        IrisPrediction(species=\"setosa\"), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: 
['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent IrisInputData message to `input_data` topic\n\n4.  Asserted and checked that the developed iris classification service\n    has reacted to IrisInputData message\n\n## Running the service\n\nThe service can be started using builtin fastkafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., 
example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nTo run the service, you will need a running Kafka broker on localhost as\nspecified in the `kafka_brokers` parameter above. We can start the Kafka\nbroker locally using the\n[`ApacheKafkaBroker`](./api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker).\n\nTo use\n[`ApacheKafkaBroker`](./api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker),\nyou need to install JRE and Kafka to your environment. 
To simplify this\nprocess, fastkafka comes with a CLI command that does just that, to run\nit, in your terminal execute the following:\n\n``` sh\nfastkafka testing install_deps\n```\n\nNow we can run\n[`ApacheKafkaBroker`](./api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker)\nthat will start a Kafka broker instance for us.\n\n``` python\nfrom fastkafka.testing import ApacheKafkaBroker\n\nbroker = ApacheKafkaBroker(apply_nest_asyncio=True)\n\nbroker.start()\n```\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n    '127.0.0.1:9092'\n\nThen, we start the FastKafka service by running the following command in\nthe folder where the `application.py` file is located:\n\n``` sh\nfastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n```\n\nIn the above command, we use `--num-workers` option to specify how 
many\nworkers to launch and we use `--kafka-broker` option to specify which\nkafka broker configuration to use from earlier specified `kafka_brokers`\n\n    [801767]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [801765]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [801767]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [801765]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [801767]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [801767]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [801765]: [INFO] aiokafka.consumer.subscription_state: Updating 
subscribed topics to: frozenset({'input_data'})\n    [801765]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [801765]: [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n    [801765]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [801767]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [801767]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [801767]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [801765]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [801767]: [ERROR] aiokafka: Unable to update metadata from [0]\n    [801765]: [ERROR] aiokafka: Unable to update metadata from [0]\n    ^C\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801765...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801767...\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [801767]: 
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n\nYou need to interrupt running of the cell above by selecting\n`Runtime->Interrupt execution` on the toolbar above.\n\nFinally, we can stop the local Kafka Broker:\n\n``` python\nbroker.stop()\n```\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 801303...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 801303 was already terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 800930...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 800930 was already terminated.\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nAsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programmatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n\n
This will generate the *asyncapi* folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 09:14 docs\n    drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 09:14 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! 
✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    ^C\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation below:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippets are for the consumer as specified in\nthe code above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippets are for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n\n## License\n\nFastKafka is licensed under the Apache License 2.0\n\nA permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. 
Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code.\n\nThe full text of the license can be found\n[here](https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/overrides/css/extra.css",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/overrides/js/extra.js",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/overrides/js/math.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.6.0/overrides/js/mathjax.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/CHANGELOG.md",
    "content": "# Release notes\n\n<!-- do not remove -->\n\n## 0.7.0\n\n### New Features\n\n- Optional description argument to consumes and produces decorator implemented ([#338](https://github.com/airtai/fastkafka/pull/338)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumes and produces decorators now have optional `description` argument that is used instead of function docstring in async doc generation when specified\n\n- FastKafka Windows OS support enabled ([#326](https://github.com/airtai/fastkafka/pull/326)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now run on Windows\n\n- FastKafka and FastAPI integration implemented ([#304](https://github.com/airtai/fastkafka/pull/304)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now be run alongside FastAPI\n\n- Batch consuming option to consumers implemented ([#298](https://github.com/airtai/fastkafka/pull/298)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumers can consume events in batches by specifying msg type of consuming function as `List[YourMsgType]` \n\n- Removed support for synchronous produce functions ([#295](https://github.com/airtai/fastkafka/pull/295)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Added default broker values and update docs ([#292](https://github.com/airtai/fastkafka/pull/292)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n### Bugs Squashed\n\n- Fix index.ipynb to be runnable in colab ([#342](https://github.com/airtai/fastkafka/issues/342))\n\n- Use cli option root_path docs generate and serve CLI commands ([#341](https://github.com/airtai/fastkafka/pull/341)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Fix incorrect asyncapi docs path on fastkafka docs serve command ([#335](https://github.com/airtai/fastkafka/pull/335)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Serve docs now takes app `root_path` argument into consideration when specified 
in app\n\n- Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps ([#315](https://github.com/airtai/fastkafka/issues/315))\n\n- Fix logs printing timestamps ([#308](https://github.com/airtai/fastkafka/issues/308))\n\n- Fix topics with dots causing failure of tester instantiation ([#306](https://github.com/airtai/fastkafka/pull/306)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Specified topics can now have \".\" in their names\n\n## 0.6.0\n\n### New Features\n\n- Timestamps added to CLI commands ([#283](https://github.com/airtai/fastkafka/pull/283)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Added option to process messages concurrently ([#278](https://github.com/airtai/fastkafka/pull/278)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - A new `executor` option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.\n\n- Add consumes and produces functions to app ([#274](https://github.com/airtai/fastkafka/pull/274)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Add batching for producers ([#273](https://github.com/airtai/fastkafka/pull/273)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(batch): batch support is a real need! and i see it on the issue list.... 
so hope we do not need to wait too long\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Fix broken links in guides ([#272](https://github.com/airtai/fastkafka/pull/272)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Generate the docusaurus sidebar dynamically by parsing summary.md ([#270](https://github.com/airtai/fastkafka/pull/270)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Metadata passed to consumer ([#269](https://github.com/airtai/fastkafka/pull/269)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(key): read the key value somehow..Maybe I missed something in the docs\n    requirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD operations.\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Contribution with instructions how to build and test added ([#255](https://github.com/airtai/fastkafka/pull/255)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Export encoders, decoders from fastkafka.encoder ([#246](https://github.com/airtai/fastkafka/pull/246)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n- Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. 
([#239](https://github.com/airtai/fastkafka/issues/239))\n\n\n- UI Improvement: Post screenshots with links to the actual messages in testimonials section ([#228](https://github.com/airtai/fastkafka/issues/228))\n\n### Bugs Squashed\n\n- Batch testing fix ([#280](https://github.com/airtai/fastkafka/pull/280)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Tester breaks when using Batching or KafkaEvent producers ([#279](https://github.com/airtai/fastkafka/issues/279))\n\n- Consumer loop callbacks are not executing in parallel ([#276](https://github.com/airtai/fastkafka/issues/276))\n\n\n## 0.5.0\n\n### New Features\n\n- Significant speedup of Kafka producer ([#236](https://github.com/airtai/fastkafka/pull/236)), thanks to [@Sternakt](https://github.com/Sternakt)\n \n\n- Added support for AVRO encoding/decoding ([#231](https://github.com/airtai/fastkafka/pull/231)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n### Bugs Squashed\n\n- Fixed sidebar to include guides in docusaurus documentation ([#238](https://github.com/airtai/fastkafka/pull/238)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Fixed link to symbols in docusaurus docs ([#227](https://github.com/airtai/fastkafka/pull/227)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Removed bootstrap servers from constructor ([#220](https://github.com/airtai/fastkafka/pull/220)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n## 0.4.0\n\n### New Features\n\n- Integrate fastkafka chat ([#208](https://github.com/airtai/fastkafka/pull/208)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add benchmarking ([#206](https://github.com/airtai/fastkafka/pull/206)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Enable fast testing without running kafka locally ([#198](https://github.com/airtai/fastkafka/pull/198)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Generate docs using Docusaurus 
([#194](https://github.com/airtai/fastkafka/pull/194)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add test cases for LocalRedpandaBroker ([#189](https://github.com/airtai/fastkafka/pull/189)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Reimplement patch and delegates from fastcore ([#188](https://github.com/airtai/fastkafka/pull/188)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Rename existing functions into start and stop and add lifespan handler ([#117](https://github.com/airtai/fastkafka/issues/117))\n  - https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios\n\n\n## 0.3.1\n\n-  README.md file updated\n\n\n## 0.3.0\n\n### New Features\n\n- Guide for fastkafka produces using partition key ([#172](https://github.com/airtai/fastkafka/pull/172)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #161\n\n- Add support for Redpanda for testing and deployment ([#181](https://github.com/airtai/fastkafka/pull/181)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Remove bootstrap_servers from __init__ and use the name of broker as an option when running/testing ([#134](https://github.com/airtai/fastkafka/issues/134))\n\n- Add a GH action file to check for broken links in the docs ([#163](https://github.com/airtai/fastkafka/issues/163))\n\n- Optimize requirements for testing and docs ([#151](https://github.com/airtai/fastkafka/issues/151))\n\n- Break requirements into base and optional for testing and dev ([#124](https://github.com/airtai/fastkafka/issues/124))\n  - Minimize base requirements needed just for running the service.\n\n- Add link to example git repo into guide for building docs using actions ([#81](https://github.com/airtai/fastkafka/issues/81))\n\n- Add logging for run_in_background ([#46](https://github.com/airtai/fastkafka/issues/46))\n\n- Implement partition Key mechanism for producers 
([#16](https://github.com/airtai/fastkafka/issues/16))\n\n### Bugs Squashed\n\n- Implement checks for npm installation and version ([#176](https://github.com/airtai/fastkafka/pull/176)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #158 by checking if the npx is installed and more verbose error handling\n\n- Fix the helper.py link in CHANGELOG.md ([#165](https://github.com/airtai/fastkafka/issues/165))\n\n- fastkafka docs install_deps fails ([#157](https://github.com/airtai/fastkafka/issues/157))\n  - Unexpected internal error: [Errno 2] No such file or directory: 'npx'\n\n- Broken links in docs ([#141](https://github.com/airtai/fastkafka/issues/141))\n\n- fastkafka run is not showing up in CLI docs ([#132](https://github.com/airtai/fastkafka/issues/132))\n\n\n## 0.2.3\n\n- Fixed broken links on PyPi index page\n\n\n## 0.2.2\n\n### New Features\n\n- Extract JDK and Kafka installation out of LocalKafkaBroker ([#131](https://github.com/airtai/fastkafka/issues/131))\n\n- PyYAML version relaxed ([#119](https://github.com/airtai/fastkafka/pull/119)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Replace docker based kafka with local ([#68](https://github.com/airtai/fastkafka/issues/68))\n  - [x] replace docker compose with a simple docker run (standard run_jupyter.sh should do)\n  - [x] replace all tests to use LocalKafkaBroker\n  - [x] update documentation\n\n### Bugs Squashed\n\n- Fix broken link for FastKafka docs in index notebook ([#145](https://github.com/airtai/fastkafka/issues/145))\n\n- Fix encoding issues when loading setup.py on windows OS ([#135](https://github.com/airtai/fastkafka/issues/135))\n\n\n## 0.2.0\n\n### New Features\n\n- Replace kafka container with LocalKafkaBroker ([#112](https://github.com/airtai/fastkafka/issues/112))\n  - - [x] Replace kafka container with LocalKafkaBroker in tests\n- [x] Remove kafka container from tests environment\n- [x] Fix failing tests\n\n### Bugs Squashed\n\n- Fix random failing in CI 
([#109](https://github.com/airtai/fastkafka/issues/109))\n\n\n## 0.1.3\n\n- version update in __init__.py\n\n\n## 0.1.2\n\n### New Features\n\n\n- Git workflow action for publishing Kafka docs ([#78](https://github.com/airtai/fastkafka/issues/78))\n\n\n### Bugs Squashed\n\n- Include missing requirement ([#110](https://github.com/airtai/fastkafka/issues/110))\n  - [x] Typer is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py) but it is not included in [settings.ini](https://github.com/airtai/fastkafka/blob/main/settings.ini)\n  - [x] Add aiohttp which is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py)\n  - [x] Add nbformat which is imported in _components/helpers.py\n  - [x] Add nbconvert which is imported in _components/helpers.py\n\n\n## 0.1.1\n\n\n### Bugs Squashed\n\n- JDK install fails on Python 3.8 ([#106](https://github.com/airtai/fastkafka/issues/106))\n\n\n\n## 0.1.0\n\nInitial release\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/CNAME",
    "content": "fastkafka.airt.ai\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/CONTRIBUTING.md",
    "content": "# Contributing to fastkafka\n\nFirst off, thanks for taking the time to contribute! ❤️\n\nAll types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉\n\n> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:\n> - Star the project\n> - Tweet about it\n> - Refer this project in your project's readme\n> - Mention the project at local meetups and tell your friends/colleagues\n\n## Table of Contents\n\n- [I Have a Question](#i-have-a-question)\n- [I Want To Contribute](#i-want-to-contribute)\n  - [Reporting Bugs](#reporting-bugs)\n  - [Suggesting Enhancements](#suggesting-enhancements)\n  - [Your First Code Contribution](#your-first-code-contribution)\n- [Development](#development)\n    - [Prepare the dev environment](#prepare-the-dev-environment)\n    - [Way of working](#way-of-working)\n    - [Before a PR](#before-a-pr)\n\n\n\n## I Have a Question\n\n> If you want to ask a question, we assume that you have read the available [Documentation](https://fastkafka.airt.ai/docs).\n\nBefore you ask a question, it is best to search for existing [Issues](https://github.com/airtai/fastkafka/issues) that might help you. 
In case you have found a suitable issue and still need clarification, you can write your question in this issue.\n\nIf you then still feel the need to ask a question and need clarification, we recommend the following:\n\n- Contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new)\n    - Provide as much context as you can about what you're running into\n\nWe will then take care of the issue as soon as possible.\n\n## I Want To Contribute\n\n> ### Legal Notice \n> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.\n\n### Reporting Bugs\n\n#### Before Submitting a Bug Report\n\nA good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.\n\n- Make sure that you are using the latest version.\n- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](https://fastkafka.airt.ai/docs). 
If you are looking for support, you might want to check [this section](#i-have-a-question)).\n- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/airtai/fastkafka/issues?q=label%3Abug).\n- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.\n- Collect information about the bug:\n  - Stack trace (Traceback)\n  - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)\n  - Python version\n  - Possibly your input and the output\n  - Can you reliably reproduce the issue? And can you also reproduce it with older versions?\n\n#### How Do I Submit a Good Bug Report?\n\nWe use GitHub issues to track bugs and errors. If you run into an issue with the project:\n\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)\n- Explain the behavior you would expect and the actual behavior.\n- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.\n- Provide the information you collected in the previous section.\n\nOnce it's filed:\n\n- The project team will label the issue accordingly.\n- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. 
Bugs with the `needs-repro` tag will not be addressed until they are reproduced.\n- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented.\n\n### Suggesting Enhancements\n\nThis section guides you through submitting an enhancement suggestion for fastkafka, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.\n\n#### Before Submitting an Enhancement\n\n- Make sure that you are using the latest version.\n- Read the [documentation](https://fastkafka.airt.ai/docs) carefully and find out if the functionality is already covered, maybe by an individual configuration.\n- Perform a [search](https://github.com/airtai/fastkafka/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.\n- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library.\n- If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n\n#### How Do I Submit a Good Enhancement Suggestion?\n\nEnhancement suggestions are tracked as [GitHub issues](https://github.com/airtai/fastkafka/issues).\n\n- Use a **clear and descriptive title** for the issue to identify the suggestion.\n- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.\n- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.\n- **Explain why this enhancement would be useful** to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.\n\n### Your First Code Contribution\n\nA great way to start contributing to FastKafka would be by solving an issue tagged with \"good first issue\". To find a list of issues that are tagged as \"good first issue\" and are suitable for newcomers, please visit the following link: [Good first issues](https://github.com/airtai/fastkafka/labels/good%20first%20issue)\n\nThese issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in [Way of working](#way-of-working) and [Before a PR](#before-a-pr), and help us make FastKafka even better!\n\nIf you have any questions or need further assistance, feel free to reach out to us. 
Happy coding!\n\n## Development\n\n### Prepare the dev environment\n\nTo start contributing to fastkafka, you first have to prepare the development environment.\n\n#### Clone the fastkafka repository\n\nTo clone the repository, run the following command in the CLI:\n\n```shell\ngit clone https://github.com/airtai/fastkafka.git\n```\n\n#### Optional: create a virtual python environment\n\nTo prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your fastkafka project by running:\n\n```shell\npython3 -m venv fastkafka-env\n```\n\nAnd to activate your virtual environment run:\n\n```shell\nsource fastkafka-env/bin/activate\n```\n\nTo learn more about virtual environments, please have a look at [official python documentation](https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available.)\n\n#### Install fastkafka\n\nTo install fastkafka, navigate to the root directory of the cloned fastkafka project and run:\n\n```shell\npip install -e \".[dev]\"\n```\n\n#### Install JRE and Kafka toolkit\n\nTo be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:\n\n1. Use our `fastkafka testing install-deps` CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR\n2. Install JRE and Kafka manually.\n   To do this, please refer to [JDK and JRE installation guide](https://docs.oracle.com/javase/9/install/toc.htm) and [Apache Kafka quickstart](https://kafka.apache.org/quickstart)\n   \n#### Install npm\n\nTo be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:\n\n1. Use our `fastkafka docs install_deps` CLI command which will install npm for you in your .local folder\nOR\n2. 
Install npm manually.\n   To do this, please refer to [NPM installation guide](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)\n   \n#### Install docusaurus\n\nTo generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project.\n\n#### Check if everything works\n\nAfter installing fastkafka and all the necessary dependencies, run `nbdev_test` in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everything is set up correctly, you will get a \"Success.\" message in your terminal, otherwise please refer to previous steps.\n\n### Way of working\n\nThe development of fastkafka is done in Jupyter notebooks. Inside the `nbs` directory you will find all the source code of fastkafka, this is where you will implement your changes.\n\nThe testing, cleanup and exporting of the code is being handled by `nbdev`, please, before starting the work on fastkafka, get familiar with it by reading [nbdev documentation](https://nbdev.fast.ai/getting_started.html).\n\nThe general philosophy you should follow when writing code for fastkafka is:\n\n- Function should be an atomic functionality, short and concise\n   - Good rule of thumb: your function should be 5-10 lines long usually\n- If there are more than 2 params, enforce keywording using *\n   - E.g.: `def function(param1, *, param2, param3): ...`\n- Define typing of arguments and return value\n   - If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected\n- After the function cell, write test cells using the assert keyword\n   - Whenever you implement something you should test that functionality immediately in the cells below \n- Add Google style python docstrings when function is implemented and tested\n\n### Before a PR\n\nAfter you have implemented your changes you will want to open a pull request to merge those changes into our main 
branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):\n\n1. Format your notebooks: `nbqa black nbs`\n2. Close, shutdown, and clean the metadata from your notebooks: `nbdev_clean`\n3. Export your code: `nbdev_export`\n4. Run the tests: `nbdev_test`\n5. Test code typing: `mypy fastkafka`\n6. Test code safety with bandit: `bandit -r fastkafka`\n7. Test code safety with semgrep: `semgrep --config auto -r fastkafka`\n\nWhen you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.\n\n## Attribution\nThis guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/LICENSE.md",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/EventMetadata.md",
    "content": "## `fastkafka.EventMetadata` {#fastkafka.EventMetadata}\n\n\nA class for encapsulating Kafka record metadata.\n\n**Parameters**:\n- `topic`: The topic this record is received from\n- `partition`: The partition from which this record is received\n- `offset`: The position of this record in the corresponding Kafka partition\n- `timestamp`: The timestamp of this record\n- `timestamp_type`: The timestamp type of this record\n- `key`: The key (or `None` if no key is specified)\n- `value`: The value\n- `serialized_key_size`: The size of the serialized, uncompressed key in bytes\n- `serialized_value_size`: The size of the serialized, uncompressed value in bytes\n- `headers`: The headers\n\n### `create_event_metadata` {#create_event_metadata}\n\n`def create_event_metadata(record: aiokafka.structs.ConsumerRecord) -> EventMetadata`\n\nCreates an instance of EventMetadata from a ConsumerRecord.\n\n**Parameters**:\n- `record`: The Kafka ConsumerRecord.\n\n**Returns**:\n- The created EventMetadata instance.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/FastKafka.md",
    "content": "## `fastkafka.FastKafka` {#fastkafka.FastKafka}\n\n### `__init__` {#init}\n\n`def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Optional[Dict[str, Any]] = None, root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None`\n\nCreates FastKafka application\n\n**Parameters**:\n- `title`: optional title for the documentation. 
If None,\nthe title will be set to empty string\n- `description`: optional description for the documentation. If\nNone, the description will be set to empty string\n- `version`: optional version for the documentation. If None,\nthe version will be set to empty string\n- `contact`: optional contact for the documentation. If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'\n- `kafka_brokers`: dictionary describing kafka brokers used for setting\nthe bootstrap server when running the application and for\ngenerating documentation. Defaults to\n    {\n        \"localhost\": {\n            \"url\": \"localhost\",\n            \"description\": \"local kafka broker\",\n            \"port\": \"9092\",\n        }\n    }\n- `root_path`: path to where documentation will be created\n- `lifespan`: asynccontextmanager that is used for setting lifespan hooks.\n__aenter__ is called before app start and __aexit__ after app stop.\nThe lifespan is called when the application is started as async context\nmanager, e.g.:`async with kafka_app...`\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. 
If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. 
If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. 
NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. Defaults ``None``, no limit.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. 
Default: True\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. 
The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specification and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function.\n- `executor`: Type of executor to choose for consuming tasks. Available options\nare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\n\"SequentialExecutor\" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n\"DynamicTaskExecutor\" which will wrap the consuming functions into tasks\nand run them on an asyncio loop in background. 
This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the consuming function async docs.\nIf not provided, consuming function __doc__ attr will be used.\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). 
If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. 
Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_docs` {#create_docs}\n\n`def create_docs(self: fastkafka.FastKafka) -> None`\n\nCreate the asyncapi documentation based on the configured consumers and producers.\n\nThis function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. 
It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers.\n\nNote:\n    The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\n    attribute of the FastKafka instance.\n\n**Returns**:\n- None\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `fastapi_lifespan` {#fastapi_lifespan}\n\n`def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]`\n\nMethod for managing the lifespan of a FastAPI application with a specific Kafka broker.\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- Lifespan function to use for initializing FastAPI\n\n### `get_topics` {#get_topics}\n\n`def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]`\n\nGet all topics for both producing and consuming.\n\n**Returns**:\n- A set of topics for both producing and consuming.\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, 
enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the producing function async docs.\nIf not provided, producing function __doc__ attr will be used.\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. 
The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. 
However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. 
Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. 
(See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the backround.\n\n### `set_kafka_broker` {#set_kafka_broker}\n\n`def set_kafka_broker(self, kafka_broker_name: str) -> None`\n\nSets the Kafka broker to start FastKafka with\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- None\n\n**Exceptions**:\n- `ValueError`: If the provided kafka_broker_name is not found in dictionary of kafka_brokers\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/KafkaEvent.md",
    "content": "## `fastkafka.KafkaEvent` {#fastkafka.KafkaEvent}\n\n\nA generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel\n\n**Parameters**:\n- `message`: The message contained in the Kafka event, can be of type pydantic.BaseModel.\n- `key`: The optional key used to identify the Kafka event.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/encoder/AvroBase.md",
    "content": "## `fastkafka.encoder.AvroBase` {#fastkafka.encoder.AvroBase}\n\n\nThis is base pydantic class that will add some methods\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/encoder/avro_decoder.md",
    "content": "## `fastkafka.encoder.avro_decoder` {#fastkafka.encoder.avro_decoder}\n\n### `avro_decoder` {#avro_decoder}\n\n`def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any`\n\nDecoder to decode avro encoded messages to pydantic model instance\n\n**Parameters**:\n- `raw_msg`: Avro encoded bytes message received from Kafka topic\n- `cls`: Pydantic class; This pydantic class will be used to construct instance of same class\n\n**Returns**:\n- An instance of given pydantic class\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/encoder/avro_encoder.md",
    "content": "## `fastkafka.encoder.avro_encoder` {#fastkafka.encoder.avro_encoder}\n\n### `avro_encoder` {#avro_encoder}\n\n`def avro_encoder(msg: pydantic.main.BaseModel) -> bytes`\n\nEncoder to encode pydantic instances to avro message\n\n**Parameters**:\n- `msg`: An instance of pydantic basemodel\n\n**Returns**:\n- A bytes message which is encoded from pydantic basemodel\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/encoder/avsc_to_pydantic.md",
    "content": "## `fastkafka.encoder.avsc_to_pydantic` {#fastkafka.encoder.avsc_to_pydantic}\n\n### `avsc_to_pydantic` {#avsc_to_pydantic}\n\n`def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass`\n\nGenerate pydantic model from given Avro Schema\n\n**Parameters**:\n- `schema`: Avro schema in dictionary format\n\n**Returns**:\n- Pydantic model class built from given avro schema\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/encoder/json_decoder.md",
    "content": "## `fastkafka.encoder.json_decoder` {#fastkafka.encoder.json_decoder}\n\n### `json_decoder` {#json_decoder}\n\n`def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any`\n\nDecoder to decode json string in bytes to pydantic model instance\n\n**Parameters**:\n- `raw_msg`: Bytes message received from Kafka topic\n- `cls`: Pydantic class; This pydantic class will be used to construct instance of same class\n\n**Returns**:\n- An instance of given pydantic class\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/encoder/json_encoder.md",
    "content": "## `fastkafka.encoder.json_encoder` {#fastkafka.encoder.json_encoder}\n\n### `json_encoder` {#json_encoder}\n\n`def json_encoder(msg: pydantic.main.BaseModel) -> bytes`\n\nEncoder to encode pydantic instances to json string\n\n**Parameters**:\n- `msg`: An instance of pydantic basemodel\n\n**Returns**:\n- Json string in bytes which is encoded from pydantic basemodel\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/executors/DynamicTaskExecutor.md",
    "content": "## `fastkafka.executors.DynamicTaskExecutor` {#fastkafka.executors.DynamicTaskExecutor}\n\n\nA class that implements a dynamic task executor for processing consumer records.\n\nThe DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running tasks in parallel using asyncio.Task.\n\n### `__init__` {#init}\n\n`def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None`\n\nCreate an instance of DynamicTaskExecutor\n\n**Parameters**:\n- `throw_exceptions`: Flag indicating whether exceptions should be thrown or logged.\nDefaults to False.\n- `max_buffer_size`: Maximum buffer size for the memory object stream.\nDefaults to 100_000.\n- `size`: Size of the task pool. Defaults to 100_000.\n\n### `run` {#run}\n\n`def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None`\n\nRuns the dynamic task executor.\n\n**Parameters**:\n- `is_shutting_down_f`: Function to check if the executor is shutting down.\n- `generator`: Generator function for retrieving consumer records.\n- `processor`: Processor function for processing consumer records.\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/executors/SequentialExecutor.md",
    "content": "## `fastkafka.executors.SequentialExecutor` {#fastkafka.executors.SequentialExecutor}\n\n\nA class that implements a sequential executor for processing consumer records.\n\nThe SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines.\n\n### `__init__` {#init}\n\n`def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None`\n\nCreate an instance of SequentialExecutor\n\n**Parameters**:\n- `throw_exceptions`: Flag indicating whether exceptions should be thrown or logged.\nDefaults to False.\n- `max_buffer_size`: Maximum buffer size for the memory object stream.\nDefaults to 100_000.\n\n### `run` {#run}\n\n`def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None`\n\nRuns the sequential executor.\n\n**Parameters**:\n- `is_shutting_down_f`: Function to check if the executor is shutting down.\n- `generator`: Generator function for retrieving consumer records.\n- `processor`: Processor function for processing consumer records.\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/testing/ApacheKafkaBroker.md",
    "content": "## `fastkafka.testing.ApacheKafkaBroker` {#fastkafka.testing.ApacheKafkaBroker}\n\n\nApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nInitialises the ApacheKafkaBroker object\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokers) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `get_service_config_string` {#get_service_config_string}\n\n`def get_service_config_string(self: fastkafka.testing.ApacheKafkaBroker, service: str, data_dir: pathlib.Path) -> str`\n\nGets the configuration string for a service.\n\n**Parameters**:\n- `service`: Name of the service (\"kafka\" or \"zookeeper\").\n- `data_dir`: Path to the directory where the service will save data.\n\n**Returns**:\n- The service configuration string.\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.ApacheKafkaBroker) -> str`\n\nStarts a local Kafka broker and ZooKeeper instance synchronously.\n\n**Returns**:\n- The Kafka broker bootstrap server address in string format: host:port.\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None`\n\nStops a local kafka broker and zookeeper instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/testing/LocalRedpandaBroker.md",
    "content": "## `fastkafka.testing.LocalRedpandaBroker` {#fastkafka.testing.LocalRedpandaBroker}\n\n\nLocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None`\n\nInitialises the LocalRedpandaBroker object\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use by Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log levels to use for Redpanda\n\n### `get_service_config_string` {#get_service_config_string}\n\n`def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str`\n\nGenerates a configuration for a service\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `service`: \"redpanda\", defines which service to get config string for\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.LocalRedpandaBroker) -> str`\n\nStarts a local redpanda broker instance synchronously\n\n**Returns**:\n- Redpanda broker bootstrap server address in string format: addr:port\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None`\n\nStops a local redpanda broker instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/api/fastkafka/testing/Tester.md",
    "content": "## `fastkafka.testing.Tester` {#fastkafka.testing.Tester}\n\n### `__init__` {#init}\n\n`def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nMirror-like object for testing a FastKafka application\n\nCan be used as context manager\n\n**Parameters**:\n- `app`: The FastKafka application to be tested.\n- `broker`: An optional broker to start and to use for testing.\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokes) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function.\n- `executor`: Type of executor to choose for consuming tasks. Avaliable options\nare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\n\"SequentialExecutor\" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n\"DynamicTaskExecutor\" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. 
This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the consuming function async docs.\nIf not provided, consuming function __doc__ attr will be used.\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). 
If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. 
Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_docs` {#create_docs}\n\n`def create_docs(self: fastkafka.FastKafka) -> None`\n\nCreate the asyncapi documentation based on the configured consumers and producers.\n\nThis function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. 
It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers.\n\nNote:\n    The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\n    attribute of the FastKafka instance.\n\n**Returns**:\n- None\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `fastapi_lifespan` {#fastapi_lifespan}\n\n`def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]`\n\nMethod for managing the lifespan of a FastAPI application with a specific Kafka broker.\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- Lifespan function to use for initializing FastAPI\n\n### `get_topics` {#get_topics}\n\n`def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]`\n\nGet all topics for both producing and consuming.\n\n**Returns**:\n- A set of topics for both producing and consuming.\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, 
enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the producing function async docs.\nIf not provided, producing function __doc__ attr will be used.\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. 
The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. 
However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. 
Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\nDefault: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. 
(See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the backround.\n\n### `set_kafka_broker` {#set_kafka_broker}\n\n`def set_kafka_broker(self, kafka_broker_name: str) -> None`\n\nSets the Kafka broker to start FastKafka with\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- None\n\n**Exceptions**:\n- `ValueError`: If the provided kafka_broker_name is not found in dictionary of kafka_brokers\n\n### `using_local_kafka` {#using_local_kafka}\n\n`def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester`\n\nStarts local Kafka broker used by the Tester instance\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeepeer instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokes) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n**Returns**:\n- An instance of tester with Kafka as broker\n\n### `using_local_redpanda` {#using_local_redpanda}\n\n`def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: 
str = 'dev-container', default_log_level: str = 'debug') -> Tester`\n\nStarts local Redpanda broker used by the Tester instance\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use by Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log level to use for Redpanda\n\n**Returns**:\n- An instance of tester with Redpanda as broker\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/cli/fastkafka.md",
    "content": "# `fastkafka`\n\n**Usage**:\n\n```console\n$ fastkafka [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `docs`: Commands for managing fastkafka app...\n* `run`: Runs Fast Kafka API application\n* `testing`: Commands for managing fastkafka testing\n\n## `fastkafka docs`\n\nCommands for managing fastkafka app documentation\n\n**Usage**:\n\n```console\n$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `generate`: Generates documentation for a FastKafka...\n* `install_deps`: Installs dependencies for FastKafka...\n* `serve`: Generates and serves documentation for a...\n\n### `fastkafka docs generate`\n\nGenerates documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs generate [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created; default is current directory\n* `--help`: Show this message and exit.\n\n### `fastkafka docs install_deps`\n\nInstalls dependencies for FastKafka documentation generation\n\n**Usage**:\n\n```console\n$ fastkafka docs install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n### `fastkafka docs serve`\n\nGenerates and serves documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs serve [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  
[required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created; default is current directory\n* `--bind TEXT`: Some info  [default: 127.0.0.1]\n* `--port INTEGER`: Some info  [default: 8000]\n* `--help`: Show this message and exit.\n\n## `fastkafka run`\n\nRuns Fast Kafka API application\n\n**Usage**:\n\n```console\n$ fastkafka run [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--num-workers INTEGER`: Number of FastKafka instances to run, defaults to number of CPU cores.  [default: 4]\n* `--kafka-broker TEXT`: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [default: localhost]\n* `--help`: Show this message and exit.\n\n## `fastkafka testing`\n\nCommands for managing fastkafka testing\n\n**Usage**:\n\n```console\n$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `install_deps`: Installs dependencies for FastKafka app...\n\n### `fastkafka testing install_deps`\n\nInstalls dependencies for FastKafka app testing\n\n**Usage**:\n\n```console\n$ fastkafka testing install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/cli/run_fastkafka_server_process.md",
    "content": "# `run_fastkafka_server_process`\n\n**Usage**:\n\n```console\n$ run_fastkafka_server_process [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--kafka-broker TEXT`: Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [required]\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_00_FastKafka_Demo.md",
    "content": "# FastKafka tutorial\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\n``` python\ntry:\n    import fastkafka\nexcept:\n    ! pip install fastkafka\n```\n\n## Running in Colab\n\nYou can start this interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n</a>\n\n## Writing server code\n\nHere is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic.\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. 
The following call downloads\nthe dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/usage/types/#constrained-types),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. 
It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server.\n\nNext, an object of the `FastKafka` class is initialized with the minimum\nset of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. 
Specifying the\n  type of the single argument is instructing Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## Testing the service\n\nThe service can be tested using the `Tester` instances which internally\nstarts Kafka broker and zookeeper.\n\nBefore running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:\n\n``` sh\nfastkafka testing install_deps\n```\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send IrisInputData message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with IrisPrediction in predictions topic\n    await tester.awaited_mocks.on_predictions.assert_awaited_with(\n        IrisPrediction(species=\"setosa\"), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent IrisInputData message to `input_data` topic\n\n4.  
Asserted and checked that the developed iris classification service\n    has reacted to IrisInputData message\n\n## Running the service\n\nThe service can be started using builtin `faskafka run` CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": 
{\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nTo run the service, you will need a running Kafka broker on localhost as\nspecified in the `kafka_brokers` parameter above. We can start the Kafka\nbroker locally using the `ApacheKafkaBroker`. Notice that the same\nhappens automatically in the `Tester` as shown above.\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [INFO] 
fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n    '127.0.0.1:9092'\n\nThen, we start the FastKafka service by running the following command in\nthe folder where the `application.py` file is located:\n\n``` sh\nfastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n```\n\nIn the above command, we use `--num-workers` option to specify how many\nworkers to launch and we use `--kafka-broker` option to specify which\nkafka broker configuration to use from earlier specified `kafka_brokers`\n\n    [1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'localhost:9092'}'\n    [1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n    [1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n    [1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n    ^C\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n\nYou need to interupt running of the cell above by selecting\n`Runtime->Interupt execution` on the toolbar above.\n\nFinally, we can stop the local Kafka Broker:\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n  
  [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nWhen running in Colab, we need to update Node.js first:\n\nWe need to install all dependancies for the generator using the\nfollowing command line:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programatically you just need to call the\nfolloving command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n. This will generate the *asyncapi* folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\n    drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    ^C\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation bellow:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    bootstrap_servers=\"localhost:9092\",\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet are for the consumer as specified in\nthe code 
above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippets are for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_01_Intro.md",
    "content": "# Intro\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nThis tutorial will show you how to use <b>FastKafkaAPI</b>, step by\nstep.\n\nThe goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel.\n\nIn this Intro tutorial we’ll go trough the basic requirements to run the\ndemos presented in future steps.\n\n## Installing FastKafkaAPI\n\nFirst step is to install FastKafkaAPI\n\n``` shell\n$ pip install fastkafka\n```\n\n## Preparing a Kafka broker\n\nNext step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication.\n\n!!! info \"Hey, your first info!\"\n\n    If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n\nTo go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times).\n\n!!! warning \"Listen! This is important.\"\n\n    To be able to setup this configuration you need to have Docker and docker-compose installed\n\n    See here for more info on <a href = \\\"https://docs.docker.com/\\\" target=\\\"_blank\\\">Docker</a> and <a href = \\\"https://docs.docker.com/compose/install/\\\" target=\\\"_blank\\\">docker compose</a>\n\nTo setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g. fastkafka_demo). 
Inside the new\nfolder create a new YAML file named <b>kafka_demo.yml</b> and copy the\nfollowing configuration into it:\n\n``` yaml\nversion: \"3\"\nservices:\n    zookeeper:\n        image: wurstmeister/zookeeper\n        hostname: zookeeper\n        container_name: zookeeper\n        networks:\n          - fastkafka-network\n        ports:\n          - \"2181:2181\"\n          - \"22:22\"\n          - \"2888:2888\"\n          - \"3888:3888\"\n    kafka:\n        image: wurstmeister/kafka\n        container_name: kafka\n        ports:\n          - \"9093:9093\"\n        environment:\n            HOSTNAME_COMMAND: \"docker info | grep ^Name: | cut -d' ' -f 2\"\n            KAFKA_ZOOKEEPER_CONNECT: \"zookeeper:2181\"\n            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n            KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n            KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n            KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n            KAFKA_CREATE_TOPICS: \"hello:1:1\"\n        volumes:\n            - /var/run/docker.sock:/var/run/docker.sock\n        depends_on:\n            - zookeeper\n        healthcheck:\n            test: [ \"CMD\", \"kafka-topics.sh\", \"--list\", \"--zookeeper\", \"zookeeper:2181\" ]\n            interval: 5s\n            timeout: 10s\n            retries: 5\n        networks:\n          - fastkafka-network\nnetworks:\n    fastkafka-network:\n        name: \"fastkafka-network\"\n```\n\nThis configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a ‘hello’ topic (quite enough for a\nstart). To start the configuration, run:\n\n``` shell\n$ docker-compose -f kafka_demo.yaml up -d --wait\n```\n\nThis will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
:confetti_ball:\n\n## Running the code\n\nAfter installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the ‘First Steps’ part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem.\n\nYou are highly encouraged to follow along the tutorials not just by\nreading through them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_02_First_Steps.md",
    "content": "# First Steps\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Creating a simple Kafka consumer app\n\nFor our first demo we will create the simplest possible Kafka consumer\nand run it using ‘fastkafka run’ command.\n\nThe consumer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n\n2.  Listen to the hello topic\n\n3.  Write any message received from the hello topic to stdout\n\nTo create the consumer, first, create a file named\n<b>hello_kafka_consumer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n    \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n    print(f\"Got data, msg={msg.msg}\", flush=True)\n```\n\n!!! info \"Kafka configuration\"\n\n    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\n!!! warning \"Remember to flush\"\n\n    Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n\nTo run this consumer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n    [878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n\nNow you can interact with your consumer, by sending the messages to the\nsubscribed ‘hello’ topic, don’t worry, we will cover this in the next\nstep of this guide.\n\n## Sending first message to your consumer\n\nAfter we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly.\n\nIf you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic.\n\nFirst, connect to your running kafka broker by running:\n\n``` shell\ndocker run -it kafka /bin/bash\n```\n\nThen, when connected to the container, run:\n\n``` shell\nkafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n```\n\nThis will open an interactive connection to the hello topic, now you can\nwrite your messages to the topic and they will be consumed by our\nconsumer.\n\nIn the shell, type:\n\n``` shell\n{\"msg\":\"hello\"}\n```\n\nand press enter. 
This will send a hello message to the topic which will\nbe read by our running consumer and outputted to stdout.\n\nCheck the output of your consumer (terminal where you ran the ‘fastkafka\nrun’ command) and confirm that your consumer has read the Kafka message.\nYou should see something like this:\n\n``` shell\nGot data, msg=hello\n```\n\n## Creating a hello Kafka producer\n\nConsuming messages is only a part of this Library functionality, the\nother big part is producing the messages. So, let’s create our first\nkafka producer which will send its greetings to our consumer\nperiodically.\n\nThe producer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n2.  Connect to the hello topic\n3.  Periodically send a message to the hello world topic\n\nTo create the producer, first, create a file named\n<b>hello_kafka_producer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n    logger.info(f\"Producing: {msg}\")\n    return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello(HelloKafkaMsg(msg=\"hello\"))\n        await asyncio.sleep(1)\n```\n\n!!! 
info \"Kafka configuration\"\n\n    This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\nTo run this producer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n```\n\nAfter running the command, you should see something similar to the output\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n\nNow, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. If your consumer is still running, you\nshould see the messages appear in its log.\n\n## Recap\n\nIn this guide we have:\n\n1.  Created a simple Kafka consumer using FastKafka\n2.  Sent a message to our consumer through Kafka\n3.  Created a simple Kafka producer using FastKafka\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_03_Authentication.md",
    "content": "# Authentication\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## TLS Authentication\n\nsasl_mechanism (str) – Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN\n\nsasl_plain_username (str) – username for SASL PLAIN authentication.\nDefault: None\n\nsasl_plain_password (str) – password for SASL PLAIN authentication.\nDefault: None\n\nsasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_04_Github_Actions_Workflow.md",
    "content": "# Deploy FastKafka docs to GitHub Pages\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Getting started\n\nAdd your workflow file `.github/workflows/fastkafka_docs_deploy.yml` and\npush it to your remote default branch.\n\nHere is an example workflow:\n\n``` yaml\nname: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n  push:\n    branches: [ \"main\", \"master\" ]\n  workflow_dispatch:\n\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n      - uses: airtai/workflows/fastkafka-ghp@main\n        with:\n          app: \"test_fastkafka.application:kafka_app\"\n```\n\n## Options\n\n### Set app location\n\nInput in the form of `path:app`, where `path` is the path to a Python\nfile and `app` is an object of type `FastKafka`:\n\n``` yaml\n- name: Deploy\n  uses: airtai/workflows/fastkafka-ghp@main\n  with:\n    app: \"test_fastkafka.application:kafka_app\"\n```\n\nIn the above example, `FastKafka` app is named as `kafka_app` and it is\navailable in the `application` submodule of the `test_fastkafka` module.\n\n## Example Repository\n\nA `FastKafka`-based library that uses the above-mentioned workflow\nactions to publish FastKafka docs to `GitHub Pages` can be found\n[here](https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_05_Lifespan_Handler.md",
    "content": "# Lifespan Events\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nDid you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! :rocket:\n\nLets break it down:\n\nYou can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages.\n\nSimilarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up.\n\nBy executing code before consuming and after producing, you cover the\nentire lifecycle of your application :tada:\n\nThis is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!\n\nSo lets give it a try and see how it can make your Kafka app even more\nawesome! :muscle:\n\n## Lifespan example - Iris prediction model\n\nLet’s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don’t want to load them for every\nmessage.\n\nHere’s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we’ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication.\n\n### Lifespan\n\nYou can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager.\n\nLet’s start with an example and then see it in detail.\n\nWe create an async function lifespan() with yield like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \n```\n\nThe first thing to notice, is that we are defining an async function\nwith `yield`. This is very similar to Dependencies with `yield`.\n\nThe first part of the function, before the `yield`, will be executed\n**before** the application starts. 
And the part after the `yield` will\nbe executed **after** the application has finished.\n\nThis lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown.\n\nThe lifespan will be passed a KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup.\n\nFor demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called.\n\n### Async context manager\n\nContext managers can be used in `with` blocks, our lifespan, for example\ncould be used like this:\n\n``` python\nml_models = {}\nasync with lifespan(None):\n    print(ml_models)\n```\n\nWhen you create a context manager or an async context manager, what it\ndoes is that, before entering the `with` block, it will execute the code\nbefore the `yield`, and after exiting the `with` block, it will execute\nthe code after the `yield`.\n\nIf you want to learn more about context managers and contextlib\ndecorators, please visit [Python official\ndocs](https://docs.python.org/3/library/contextlib.html)\n\n## App demo\n\n### FastKafka app\n\nLets now create our application using the created lifespan handler.\n\nNotice how we passed our lifespan handler to the app when constructing\nit through the `lifespan` argument.\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n```\n\n### Data modeling\n\nLets model the Iris data for our app:\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, 
description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Consumers and producers\n\nLets create a consumer and producer for our app that will generate\npredictions from input iris data.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Final app\n\nThe final app looks like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal 
width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Running the app\n\nNow we can run the app with your custom lifespan handler. 
Copy the code\nabove in lifespan_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n```\n\nWhen you run the app, you should see a similar output to the one below:\n\n    [262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [262292]: Loading the model!\n    [262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from 
{} to {'input_data': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n    [262292]: Exiting, clearing model dict!\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n\n## Recap\n\nIn this guide we have defined a lifespan handler and passed to our\nFastKafka app.\n\nSome important points are:\n\n1.  Lifespan handler is implemented as\n    [AsyncContextManager](https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager)\n2.  Code **before** yield in lifespan will be executed **before**\n    application **startup**\n3.  Code **after** yield in lifespan will be executed **after**\n    application **shutdown**\n4.  You can pass your lifespan handler to FastKafka app on\n    initialisation by passing a `lifespan` argument\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_06_Benchmarking_FastKafka.md",
    "content": "# Benchmarking FastKafka app\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\nTo benchmark a `FastKafka` project, you will need the following:\n\n1.  A library built with `FastKafka`.\n2.  A running `Kafka` instance to benchmark the FastKafka application\n    against.\n\n### Creating FastKafka Code\n\nLet’s create a `FastKafka`-based application and write it to the\n`application.py` file based on the [tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": 
\"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n`FastKafka` has a decorator for benchmarking which is appropriately\ncalled as `benchmark`. 
Let’s edit our `application.py` file and add the\n`benchmark` decorator to the consumes method.\n\n``` python\n# content of the \"application.py\" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", 
auto_offset_reset=\"latest\")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nHere we are conducting a benchmark of a function that consumes data from\nthe `input_data` topic with an interval of 1 second and a sliding window\nsize of 5.\n\nThis `benchmark` method uses the `interval` parameter to calculate the\nresults over a specific time period, and the `sliding_window_size`\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation.\n\nThis benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement.\n\n### Starting Kafka\n\nIf you already have a `Kafka` running somewhere, then you can skip this\nstep.\n\nPlease keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself.\n\n#### Installing Java and Kafka\n\nWe need a working `Kafka`instance to benchmark our `FastKafka` app, and\nto run `Kafka` we need `Java`. 
Thankfully, `FastKafka` comes with a CLI\nto install both `Java` and `Kafka` on our machine.\n\nSo, let’s install `Java` and `Kafka` by executing the following command.\n\n``` cmd\nfastkafka testing install_deps\n```\n\nThe above command will extract `Kafka` scripts at the location\n“\\$HOME/.local/kafka_2.13-3.3.2\" on your machine.\n\n#### Creating configuration for Zookeeper and Kafka\n\nNow we need to start `Zookeeper` and `Kafka` separately, and to start\nthem we need `zookeeper.properties` and `kafka.properties` files.\n\nLet’s create a folder inside the folder where `Kafka` scripts were\nextracted and change directory into it.\n\n``` cmd\nmkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n```\n\nLet’s create a file called `zookeeper.properties` and write the\nfollowing content to the file:\n\n``` txt\ndataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n```\n\nSimilarly, let’s create a file called `kafka.properties` and write the\nfollowing content to the file:\n\n``` txt\nbroker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n```\n\n#### Starting Zookeeper and Kafka\n\nWe need two different terminals to run `Zookeeper` in one and `Kafka` in\nanother. 
Let’s open a new terminal and run the following commands to\nstart `Zookeeper`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n```\n\nOnce `Zookeeper` is up and running, open a new terminal and execute the\nfollwing commands to start `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n```\n\nNow we have both `Zookeeper` and `Kafka` up and running.\n\n#### Creating topics in Kafka\n\nIn a new terminal, please execute the following command to create\nnecessary topics in `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n```\n\n#### Populating topics with dummy data\n\nTo benchmark our `FastKafka` app, we need some data in `Kafka` topics.\n\nIn the same terminal, let’s create some dummy data:\n\n``` cmd\nyes '{\"sepal_length\": 0.7739560486, \"sepal_width\": 0.8636615789, \"petal_length\": 0.6122663046, \"petal_width\": 0.1338914722}' | head -n 1000000 > /tmp/test_data\n```\n\nThis command will create a file called `test_data` in the `tmp` folder\nwith one million rows of text. This will act as dummy data to populate\nthe `input_data` topic.\n\nLet’s populate the created topic `input_data` with the dummy data which\nwe created above:\n\n``` cmd\n./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n```\n\nNow our topic `input_data` has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again.\n\n### Benchmarking FastKafka\n\nOnce `Zookeeper` and `Kafka` are ready, benchmarking `FastKafka` app is\nas simple as running the `fastkafka run` command:\n\n``` cmd\nfastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n```\n\nThis command will start the `FastKafka` app and begin consuming messages\nfrom `Kafka`, which we spun up earlier. Additionally, the same command\nwill output all of the benchmark throughputs based on the `interval` and\n`sliding_window_size` values.\n\nThe output for the `fastkafka run` command is:\n\n``` txt\n[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 
10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For 
application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n```\n\nBased on the output, when using 1 worker, our `FastKafka` app achieved a\n`throughput` of 93k messages per second and an `average throughput` of\n93k messages per second.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",
    "content": "# Encoding and Decoding Kafka Messages with FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\n1.  A basic knowledge of `FastKafka` is needed to proceed with this\n    guide. If you are not familiar with `FastKafka`, please go through\n    the [tutorial](/docs#tutorial) first.\n2.  `FastKafka` with its dependencies installed is needed. Please\n    install `FastKafka` using the command - `pip install fastkafka`\n\n## Ways to Encode and Decode Messages with FastKafka\n\nIn python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. Similarly, while consuming messages, we consume them as bytes and\nthen convert them to strings.\n\nIn FastKafka, we specify message schema using Pydantic models as\nmentioned in [tutorial](/docs#messages):\n\n``` python\n# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nThen, we send and receive messages as instances of Pydantic models which\nwe defined. So, FastKafka needs a way to encode/decode these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics.\n\nThe `@consumes` and `@produces` methods of FastKafka accept a parameter\ncalled `decoder`/`encoder` to decode/encode Kafka messages. 
FastKafka\nprovides three ways to encode and decode messages:\n\n1.  json - This is the default encoder/decoder option in FastKafka.\n    While producing, this option converts our instance of Pydantic model\n    messages to a JSON string and then converts it to bytes before\n    sending it to the topic. While consuming, it converts bytes to a\n    JSON string and then constructs an instance of Pydantic model from\n    the JSON string.\n2.  avro - This option uses Avro encoding/decoding to convert instances\n    of Pydantic model messages to bytes while producing, and while\n    consuming, it constructs an instance of Pydantic model from bytes.\n3.  custom encoder/decoder - If you are not happy with the json or avro\n    encoder/decoder options, you can write your own encoder/decoder\n    functions and use them to encode/decode Pydantic messages.\n\n## 1. Json encoder and decoder\n\nThe default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string.\n\nWe can use the application from [tutorial](/docs#running-the-service) as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder\nparameter:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"json\")\nasync def 
on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"json\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above code, the `@kafka_app.consumes` decorator sets up a\nconsumer for the “input_data\" topic, using the ‘json’ decoder to convert\nthe message payload to an instance of `IrisInputData`. The\n`@kafka_app.produces` decorator sets up a producer for the “predictions\"\ntopic, using the ‘json’ encoder to convert the instance of\n`IrisPrediction` to message payload.\n\n## 2. Avro encoder and decoder\n\n### What is Avro?\n\nAvro is a row-oriented remote procedure call and data serialization\nframework developed within Apache’s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n[docs](https://avro.apache.org/docs/).\n\n### Installing FastKafka with Avro dependencies\n\n`FastKafka` with dependencies for Apache Avro installed is needed to use\navro encoder/decoder. 
Please install `FastKafka` with Avro support using\nthe command - `pip install fastkafka[avro]`\n\n### Defining Avro Schema Using Pydantic Models\n\nBy default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models.\n\nSo, similar to the [tutorial](/docs#tutorial), the message schema will\nremain as it is.\n\n``` python\n# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nNo need to change anything to support avro. 
You can use existing\nPydantic models as is.\n\n### Reusing existing avro schema\n\nIf you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined.\n\n#### Building pydantic models from avro schema dictionary\n\nLet’s modify the above example and let’s assume we have schemas already\nfor `IrisInputData` and `IrisPrediction` which will look like below:\n\n``` python\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": \"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n```\n\nWe can easily construct pydantic models from avro schema using\n`avsc_to_pydantic` function which is included as part of `FastKafka`\nitself.\n\n``` python\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\nThe above code will convert avro schema to pydantic models and will\nprint pydantic models’ fields. 
The output of the above is:\n\n``` txt\n{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n```\n\nThis is exactly the same as manually defining the pydantic models ourselves.\nYou don’t have to worry about making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using `avsc_to_pydantic` function as\ndemonstrated above.\n\n#### Building pydantic models from `.avsc` file\n\nNot all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary `.avsc` files in\nfilesystem. Let’s see how to convert those `.avsc` files to pydantic\nmodels.\n\nLet’s assume our avro files are stored in files called\n`iris_input_data_schema.avsc` and `iris_prediction_schema.avsc`. In that\ncase, the following code converts the schema to pydantic models:\n\n``` python\nimport json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open(\"iris_input_data_schema.avsc\", \"rb\") as f:\n    iris_input_data_schema = json.load(f)\n    \nwith open(\"iris_prediction_schema.avsc\", \"rb\") as f:\n    iris_prediction_schema = json.load(f)\n    \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\n### Consume/Produce avro messages with FastKafka\n\n`FastKafka` provides `@consumes` and `@produces` methods to\nconsume/produce messages to/from a `Kafka` topic. 
This is explained in\n[tutorial](/docs#function-decorators).\n\nThe `@consumes` and `@produces` methods accept a parameter called\n`decoder`/`encoder` to decode/encode avro messages.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above example, in `@consumes` and `@produces` methods, we\nexplicitly instruct FastKafka to `decode` and `encode` messages using\nthe `avro` `decoder`/`encoder` instead of the default `json`\n`decoder`/`encoder`.\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use `avro` to `decode` and\n`encode` messages:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": 
\"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThe above code is a sample implementation of using FastKafka to consume\nand produce 
Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the `avsc_to_pydantic` function from the FastKafka library\nto convert the Avro schema into Pydantic models, which will be used to\ndecode and encode Avro messages.\n\nThe `FastKafka` class is then instantiated with the broker details, and\ntwo functions decorated with `@kafka_app.consumes` and\n`@kafka_app.produces` are defined to consume messages from the\n“input_data\" topic and produce messages to the “predictions\" topic,\nrespectively. The functions uses the decoder=“avro\" and encoder=“avro\"\nparameters to decode and encode the Avro messages.\n\nIn summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline.\n\n## 3. Custom encoder and decoder\n\nIf you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages.\n\n### Writing a custom encoder and decoder\n\nIn this section, let’s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n[ROT13](https://en.wikipedia.org/wiki/ROT13) cipher.\n\n``` python\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n```\n\nThe above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library.\n\nThe encoding function, `custom_encoder()`, takes a message 
`msg` which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe `json()` method, obfuscates the resulting string using the ROT13\nalgorithm from the `codecs` module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding.\n\nThe decoding function, `custom_decoder()`, takes a raw message `raw_msg`\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the `json.loads()` method and returns a\nnew instance of the specified `cls` class initialized with the decoded\ndictionary.\n\nThese functions can be used with FastKafka’s `encoder` and `decoder`\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics.\n\nLet’s test the above code\n\n``` python\ni = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n```\n\nThis will result in following output\n\n``` txt\nb'{\"frcny_yratgu\": 0.5, \"frcny_jvqgu\": 0.5, \"crgny_yratgu\": 0.5, \"crgny_jvqgu\": 0.5}'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n```\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use our custom decoder and\nencoder functions:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, 
max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\n\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=custom_decoder)\nasync def on_input_data(msg: 
IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThis code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system.\n\nThe custom `encoder` function takes a message represented as a\n`BaseModel` and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned.\n\nThe custom `decoder` function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the `json`\nmodule. This dictionary is then converted to a `BaseModel` instance\nusing the cls parameter.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_11_Consumes_Basics.md",
    "content": "# @consumes basics\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@consumes` decorator to consume messages from Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will consume\n`HelloWorld` messages from hello_world topic.\n\n## Import `FastKafka`\n\nTo use the `@consumes` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\nIn this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger.\n\n``` python\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to\nconsume from the topic using [pydantic](https://docs.pydantic.dev/). For\nthe guide we’ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = 
FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a consumer function and decorate it with `@consumes`\n\nLet’s create a consumer function that will consume `HelloWorld` messages\nfrom *hello_world* topic and log them.\n\n``` python\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nThe function decorated with the `@consumes` decorator will be called\nwhen a message is produced to Kafka.\n\nThe message will then be injected into the typed *msg* argument of the\nfunction and its type will be used to parse the message.\n\nIn this example case, when the message is sent into a *hello_world*\ntopic, it will be parsed into a HelloWorld class and `on_hello_world`\nfunction will be called with the parsed class as *msg* argument value.\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.\n\n## Send the message to kafka topic\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. 
In your\nterminal, run:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\n``` python\nprint(consumer_task.value[1].decode(\"UTF-8\"))\n```\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are\nreceiving the message from, this is because the `@consumes` decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default “on\\_\" prefix from it\nand use the rest as the topic name. In this example case, the topic is\n*hello_world*.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nconsumes decorator, like this:\n\nAlso, you can define the topic name completely by defining the `topic`\nparameter in the consumes decorator, like this:\n\n## Message data\n\nThe message received from kafka is translated from binary JSON\nrepresentation into the class defined by the typing of the *msg* parameter in the\nfunction decorated by the `@consumes` decorator.\n\nIn this example case, the message will be parsed into a `HelloWorld`\nclass.\n\n## Message metadata\n\nIf you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding an EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction.\n\nLet’s demonstrate that.\n\n### Create a consumer function with metadata\n\nThe only difference from the original basic consume function is that we\nare now passing the `meta: EventMetadata` argument to the function. The\n`@consumes` decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. 
Lets log it to see what it\ncontains.\n\nFirst, we need to import the EventMetadata\n\nNow we can add the `meta` argument to our consuming function.\n\nYour final app should look like this:\n\nNow lets run the app and send a message to the broker to see the logged\nmessage metadata.\n\nYou should see a similar log as the one below and the metadata being\nlogged in your app.\n\nAs you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n:tada:\n\n## Dealing with high latency consuming functions\n\nIf your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consuming events. By default, the consume function will\nrun the consuming functions for one topic sequentially, this is the most\nstraightforward approach and results in the least amount of overhead.\n\nBut, to handle those high latency tasks and run them in parallel,\nFastKafka has a `DynamicTaskExecutor` prepared for your consumers. This\nexecutor comes with additional overhead, so use it only when you need to\nhandle high latency functions.\n\nLets demonstrate how to use it.\n\n``` python\ndecorate_consumes_executor = \"\"\"@app.consumes(executor=\"DynamicTaskExecutor\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n\"\"\"\nmd(f\"```python\\n{decorate_consumes_executor}\\n```\")\n```\n\n``` python\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. 
In your\nterminal, run:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_12_Batch_Consuming.md",
    "content": "# Batch consuming\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nIf you want to consume data in batches `@consumes` decorator makes that\npossible for you. By typing a consumed msg object as a `list` of\nmessages the consumer will call your consuming function with a batch of\nmessages consumed from a single partition. Let’s demonstrate that now.\n\n## Consume function with batching\n\nTo consume messages in batches, you need to wrap your message type into a\nlist and the `@consumes` decorator will take care of the rest for you.\nYour consumes function will be called with batches grouped by partition\nnow.\n\n``` python\n@app.consumes(auto_offset_reset=\"earliest\")\nasync def on_hello_world(msg: List[HelloWorld]):\n    logger.info(f\"Got msg batch: {msg}\")\n```\n\n## App example\n\nWe will modify the app example from [@consumes\nbasics](/docs/guides/Guide_11_Consumes_Basics.md) guide to consume\n`HelloWorld` messages batch. The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nimport asyncio\nfrom typing import List\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(auto_offset_reset=\"earliest\")\nasync def on_hello_world(msg: List[HelloWorld]):\n    logger.info(f\"Got msg batch: {msg}\")\n```\n\n## Send the messages to kafka topic\n\nLets send a 
couple of `HelloWorld` messages to the *hello_world* topic\nand check if our consumer kafka application has logged the received\nmessages batch. In your terminal, run the following command at least two\ntimes to create multiple messages in your kafka queue:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nNow we can run the app. Copy the code of the example app in\nconsumer_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nYou should see your Kafka messages being logged in batches by your\nconsumer.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_21_Produces_Basics.md",
    "content": "# @produces basics\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@produces` decorator to produce messages to Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic.\n\n## Import `FastKafka`\n\nTo use the `@produces` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to send\nto the topic using [pydantic](https://docs.pydantic.dev/). For the guide\nwe’ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a producer function and decorate it with `@produces`\n\nLet’s create a producer function that will produce `HelloWorld` messages\nto *hello_world* topic:\n\n``` python\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return 
HelloWorld(msg=msg)\n```\n\nNow you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic.\n\nBy default, the topic is determined from your function name, the “to\\_\"\nprefix is stripped and what is left over is used as a topic name. In this\ncase, that is *hello_world*.\n\n## Instruct the app to start sending HelloWorld messages\n\nLet’s use `@run_in_background` decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second.\n\n``` python\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\n``` python\nscript_file = \"producer_example.py\"\ncmd = \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\"\nmd(\n    f\"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```\"\n)\n```\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n\n## Check if the message was sent to the Kafka topic\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the {“msg\": “Hello world!\"} messages in your topic.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are sending\nthe message to, this is because the `@produces` decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default “to\\_\" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n*hello_world*.\n\n!!! warn \"New topics\"\n\n    Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nproduces decorator, like this:\n\n``` python\n\n@app.produces(prefix=\"send_to_\")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nAlso, you can define the topic name completely by defining the `topic`\nparameter in the produces decorator, like this:\n\n``` python\n\n@app.produces(topic=\"my_special_topic\")\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\n## Message data\n\nThe return value from your function will be translated to a JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app.\n\nIn this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. In the form of:\n`b'{\"msg\": \"Hello world!\"}'`\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_22_Partition_Keys.md",
    "content": "# Defining a partition key\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nPartition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance.\n\nYou can define your partition keys when using the `@produces` decorator,\nthis guide will demonstrate to you this feature.\n\n## Return a key from the producing function\n\nTo define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into `KafkaEvent` class and set the key\nvalue. Check the example below:\n\n``` python\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n```\n\nIn the example, we want to return the `HelloWorld` message class with\nthe key defined as *my_key*. So, we wrap the message and key into a\nKafkaEvent class and return it as such.\n\nWhile generating the documentation, the `KafkaEvent` class will be\nunwrapped and the `HelloWorld` class will be documented in the\ndefinition of message type, same way if you didn’t use the key.\n\n!!! info \"Which key to choose?\"\n\n    Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n\n## App example\n\nWe will modify the app example from **@producer basics** guide to return\nthe `HelloWorld` with our key. 
The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n\n## Check if the message was sent to the Kafka topic with the desired key\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic with the defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the *my_key {“msg\": “Hello world!\"}* messages in your\ntopic appearing, the *my_key* part of the message is the key that we\ndefined in our producing function.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_23_Batch_Producing.md",
    "content": "# Batch producing\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nIf you want to send your data in batches `@produces` decorator makes\nthat possible for you. By returning a `list` of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker.\n\nThis guide will demonstrate how to use this feature.\n\n## Return a batch from the producing function\n\nTo define a batch that you want to produce to Kafka topic, you need to\nreturn the `List` of the messages that you want to be batched from your\nproducing function.\n\n``` python\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\nIn the example, we want to return the `HelloWorld` message class batch\nthat is created from a list of msgs we passed into our producing\nfunction.\n\nLets also prepare a backgound task that will send a batch of “hello\nworld\" messages when the app starts.\n\n``` python\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n```\n\n## App example\n\nWe will modify the app example from [@producer\nbasics](/docs/guides/Guide_21_Produces_Basics.md) guide to return the\n`HelloWorld` batch. 
The final app will look like this (make sure you\nreplace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n    [46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n\n## Check if the batch was sent to the Kafka topic with the defined key\n\nLets check the topic and see if there are “Hello world\" messages in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages in your topic.\n\n## Batch key\n\nTo define a key for your batch like in [Defining a partition\nkey](/docs/guides/Guide_22_Partition_Keys.md) guide you can wrap the\nreturning value in a `KafkaEvent` class. To learn more about defining a\npartition key and `KafkaEvent` class, please, have a look at [Defining a\npartition key](/docs/guides/Guide_22_Partition_Keys.md) guide.\n\nLet’s demonstrate that.\n\nTo define a key, we just need to modify our producing function, like\nthis:\n\n``` python\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\"my_key\")\n```\n\nNow our app looks like this:\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\"my_key\")\n```\n\n## Check if the batch was sent to the Kafka topic\n\nLets check the topic and see if there 
are “Hello world\" messages in the\nhello_world topic, containing a defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages with the defined key in your topic.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",
    "content": "# Using multiple Kafka clusters\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nReady to take your FastKafka app to the next level? This guide shows you\nhow to connect to multiple Kafka clusters effortlessly. Consolidate\ntopics and produce messages across clusters like a pro. Unleash the full\npotential of your Kafka-powered app with FastKafka. Let’s dive in and\nelevate your application’s capabilities!\n\n### Test message\n\nTo showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called `TestMsg`. Here’s the\ndefinition of the `TestMsg` class:\n\n``` python\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n```\n\n## Defining multiple broker configurations\n\nWhen building a FastKafka application, you may need to consume messages\nfrom multiple Kafka clusters, each with its own set of broker\nconfigurations. FastKafka provides the flexibility to define different\nbroker clusters using the brokers argument in the consumes decorator.\nLet’s explore an example code snippet\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(\n    development=dict(url=\"dev.server_1\", port=9092),\n    production=dict(url=\"prod.server_1\", port=9092),\n)\nkafka_brokers_2 = dict(\n    development=dict(url=\"dev.server_2\", port=9092),\n    production=dict(url=\"prod.server_1\", port=9092),\n)\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n    print(f\"Received on s1: {msg=}\")\n    await to_predictions_1(msg)\n\n\n@app.consumes(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n    print(f\"Received on s2: {msg=}\")\n    await to_predictions_2(msg)\n    \n@app.produces(topic=\"predictions\")\nasync def 
to_predictions_1(msg: TestMsg) -> TestMsg:\n    return msg\n    \n@app.produces(topic=\"predictions\", brokers=kafka_brokers_2)\nasync def to_predictions_2(msg: TestMsg) -> TestMsg:\n    return msg\n```\n\nIn this example, the application has two consumes endpoints, both of\nwhich will consume events from `preprocessed_signals` topic.\n`on_preprocessed_signals_1` will consume events from `kafka_brokers_1`\nconfiguration and `on_preprocessed_signals_2` will consume events from\n`kafka_brokers_2` configuration. When producing, `to_predictions_1` will\nproduce to `predictions` topic on `kafka_brokers_1` cluster and\n`to_predictions_2` will produce to `predictions` topic on\n`kafka_brokers_2` cluster.\n\n#### How it works\n\nThe `kafka_brokers_1` configuration represents the primary cluster,\nwhile `kafka_brokers_2` serves as an alternative cluster specified in\nthe decorator.\n\nUsing the FastKafka class, the app object is initialized with the\nprimary broker configuration (`kafka_brokers_1`). By default, the\n`@app.consumes` decorator without the brokers argument consumes messages\nfrom the `preprocessed_signals` topic on `kafka_brokers_1`.\n\nTo consume messages from a different cluster, the `@app.consumes`\ndecorator includes the `brokers` argument. This allows explicit\nspecification of the broker cluster in the `on_preprocessed_signals_2`\nfunction, enabling consumption from the same topic but using the\n`kafka_brokers_2` configuration.\n\nThe brokers argument can also be used in the @app.produces decorator to\ndefine multiple broker clusters for message production.\n\nIt’s important to ensure that all broker configurations have the same\nrequired settings as the primary cluster to ensure consistent behavior.\n\n## Testing the application\n\nTo test our FastKafka ‘mirroring’ application, we can use our testing\nframework. 
Lets take a look how it’s done:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\n    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\"signal_s1\"))\n    # Assert on_preprocessed_signals_1 consumed sent message\n    await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\n        TestMsg(msg=\"signal_s1\"), timeout=5\n    )\n    # Assert app has produced a prediction\n    await tester.mirrors[app.to_predictions_1].assert_called_with(\n        TestMsg(msg=\"signal_s1\"), timeout=5\n    )\n\n    # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\n    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\"signal_s2\"))\n    # Assert on_preprocessed_signals_2 consumed sent message\n    await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\n        TestMsg(msg=\"signal_s2\"), timeout=5\n    )\n    # Assert app has produced a prediction\n    await tester.mirrors[app.to_predictions_2].assert_called_with(\n        TestMsg(msg=\"signal_s2\"), timeout=5\n    )\n```\n\n    23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:33:08.721 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:33:08.721 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n    23-05-30 10:33:08.722 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.722 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n    23-05-30 
10:33:08.723 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.741 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n    23-05-30 10:33:08.741 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.742 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n    23-05-30 10:33:08.743 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.744 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n    23-05-30 10:33:08.746 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:08.749 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'dev.server_2:9092'}\n    23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.755 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.756 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:08.756 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n    23-05-30 10:33:08.758 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.758 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:33:08.759 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:08.760 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.761 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'dev.server_2:9092'}\n    23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.762 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.763 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:33:08.763 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    Received on s1: msg=TestMsg(msg='signal_s1')\n    Received on s2: msg=TestMsg(msg='signal_s2')\n    23-05-30 10:33:13.745 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:13.747 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.748 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:13.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() 
finished.\n    23-05-30 10:33:13.751 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:33:13.754 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nThe usage of the `tester.mirrors` dictionary allows specifying the\ndesired topic/broker combination for sending the test messages,\nespecially when working with multiple Kafka clusters. This ensures that\nthe data is sent to the appropriate topic/broker based on the consuming\nfunction, and consumed from appropriate topic/broker based on the\nproducing function.\n\n## Running the application\n\nYou can run your application using `fastkafka run` CLI command in the\nsame way that you would run a single cluster app.\n\nTo start your app, copy the code above in multi_cluster_example.py and\nrun it by running:\n\nNow we can run the app. 
Copy the code above in multi_cluster_example.py,\nadjust your server configurations, and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\n```\n\nIn your app logs, you should see your app starting up and your two\nconsumer functions connecting to different kafka clusters.\n\n    [90735]: 23-05-30 10:33:29.699 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n    [90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:57647'}\n    [90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n    [90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n    [90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n    [90735]: 23-05-30 10:33:29.718 [INFO] 
aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n    [90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [90735]: 23-05-30 10:33:29.722 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n    [90735]: 23-05-30 10:33:29.723 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n    Starting process cleanup, this may take a few seconds...\n    23-05-30 10:33:33.548 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 90735...\n    [90735]: 23-05-30 10:33:34.666 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:34.777 [INFO] fastkafka._server: terminate_asyncio_process(): Process 90735 terminated.\n\n## Application documentation\n\nAt the moment the documentation for multicluster app is not yet\nimplemented, but it is under development and you can expect it soon!\n\n## Examples on how to use multiple broker configurations\n\n### Example \\#1\n\nIn this section, we’ll explore how you can effectively forward topics\nbetween different Kafka clusters, enabling seamless data synchronization\nfor your applications.\n\nImagine having two Kafka clusters, namely `kafka_brokers_1` and\n`kafka_brokers_2`, each hosting its own set of topics and messages. 
Now,\nif you want to forward a specific topic (in this case:\n`preprocessed_signals`) from kafka_brokers_1 to kafka_brokers_2,\nFastKafka provides an elegant solution.\n\nLet’s examine the code snippet that configures our application for topic\nforwarding:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_original(msg: TestMsg):\n    await to_preprocessed_signals_forward(msg)\n\n\n@app.produces(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\n    return data\n```\n\nHere’s how it works: our FastKafka application is configured to consume\nmessages from `kafka_brokers_1` and process them in the\n`on_preprocessed_signals_original` function. We want to forward these\nmessages to `kafka_brokers_2`. To achieve this, we define the\n`to_preprocessed_signals_forward` function as a producer, seamlessly\nproducing the processed messages to the preprocessed_signals topic\nwithin the `kafka_brokers_2` cluster.\n\n#### Testing\n\nTo test our FastKafka forwarding application, we can use our testing\nframework. 
Let’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg=\"signal\"))\n    await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\n```\n\n    23-05-30 10:33:40.969 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:33:40.970 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:33:40.971 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:33:40.972 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-05-30 10:33:40.972 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:40.982 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:33:40.982 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:40.983 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:40.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:33:40.984 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:40.985 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:40.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:40.986 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:40.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-05-30 10:33:40.989 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:40.989 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:40.991 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:44.983 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:44.986 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:44.987 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() finished.\n    23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:33:44.988 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nWith the help of the **Tester** object, we can simulate and verify the\nbehavior of our FastKafka application. Here’s how it works:\n\n1.  We create an instance of the **Tester** by passing in our *app*\n    object, which represents our FastKafka application.\n\n2.  Using the **tester.mirrors** dictionary, we can send a message to a\n    specific Kafka broker and topic combination. In this case, we use\n    `tester.mirrors[app.on_preprocessed_signals_original]` to send a\n    TestMsg message with the content “signal\" to the appropriate Kafka\n    broker and topic.\n\n3.  After sending the message, we can perform assertions on the mirrored\n    function using\n    `tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)`.\n    This assertion ensures that the mirrored function has been called\n    within a specified timeout period (in this case, 5 seconds).\n\n### Example \\#2\n\nIn this section, we’ll explore how you can effortlessly consume data\nfrom multiple sources, process it, and aggregate the results into a\nsingle topic on a specific cluster.\n\nImagine you have two Kafka clusters: **kafka_brokers_1** and\n**kafka_brokers_2**, each hosting its own set of topics and messages.\nNow, what if you want to consume data from both clusters, perform some\nprocessing, and produce the results to a single topic on\n**kafka_brokers_1**? 
FastKafka has got you covered!\n\nLet’s take a look at the code snippet that configures our application\nfor aggregating multiple clusters:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n    print(f\"Default: {msg=}\")\n    await to_predictions(msg)\n\n\n@app.consumes(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n    print(f\"Specified: {msg=}\")\n    await to_predictions(msg)\n\n\n@app.produces(topic=\"predictions\")\nasync def to_predictions(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction: {prediction}\")\n    return [prediction]\n```\n\nHere’s the idea: our FastKafka application is set to consume messages\nfrom the topic “preprocessed_signals\" on **kafka_brokers_1** cluster, as\nwell as from the same topic on **kafka_brokers_2** cluster. We have two\nconsuming functions, `on_preprocessed_signals_1` and\n`on_preprocessed_signals_2`, that handle the messages from their\nrespective clusters. These functions perform any required processing, in\nthis case, just calling the to_predictions function.\n\nThe exciting part is that the to_predictions function acts as a\nproducer, sending the processed results to the “predictions\" topic on\n**kafka_brokers_1 cluster**. 
By doing so, we effectively aggregate the\ndata from multiple sources into a single topic on a specific cluster.\n\nThis approach enables you to consume data from multiple Kafka clusters,\nprocess it, and produce the aggregated results to a designated topic.\nWhether you’re generating predictions, performing aggregations, or any\nother form of data processing, FastKafka empowers you to harness the\nfull potential of multiple clusters.\n\n#### Testing\n\nLet’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\"signal\"))\n    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\"signal\"))\n    await tester.on_predictions.assert_called(timeout=5)\n```\n\n    23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:33:50.828 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:33:50.829 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:33:50.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:50.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:33:50.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:50.876 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-05-30 10:33:50.876 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    
23-05-30 10:33:50.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:50.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:50.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:50.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:50.880 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-05-30 10:33:50.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:50.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:50.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:50.883 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 
10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:50.884 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:33:50.885 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:50.885 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:33:50.886 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    Default: msg=TestMsg(msg='signal')\n    Sending prediction: msg='signal'\n    Specified: msg=TestMsg(msg='signal')\n    Sending prediction: msg='signal'\n    23-05-30 10:33:54.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:54.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:54.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:54.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:54.881 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:54.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:54.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:54.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nHere’s how the code above works:\n\n1.  Within an `async with` block, create an instance of the Tester by\n    passing in your app object, representing your FastKafka application.\n\n2.  Using the tester.mirrors dictionary, you can send messages to\n    specific Kafka broker and topic combinations. In this case, we use\n    `tester.mirrors[app.on_preprocessed_signals_1]` and\n    `tester.mirrors[app.on_preprocessed_signals_2]` to send TestMsg\n    messages with the content “signal\" to the corresponding Kafka broker\n    and topic combinations.\n\n3.  After sending the messages, you can perform assertions on the\n    **on_predictions** function using\n    `tester.on_predictions.assert_called(timeout=5)`. This assertion\n    ensures that the on_predictions function has been called within a\n    specified timeout period (in this case, 5 seconds).\n\n### Example \\#3\n\nIn some scenarios, you may need to produce messages to multiple Kafka\nclusters simultaneously. FastKafka simplifies this process by allowing\nyou to configure your application to produce messages to multiple\nclusters effortlessly. 
Let’s explore how you can achieve this:\n\nConsider the following code snippet that demonstrates producing messages\nto multiple clusters:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals(msg: TestMsg):\n    print(f\"{msg=}\")\n    await to_predictions_1(TestMsg(msg=\"prediction\"))\n    await to_predictions_2(TestMsg(msg=\"prediction\"))\n\n\n@app.produces(topic=\"predictions\")\nasync def to_predictions_1(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction to s1: {prediction}\")\n    return [prediction]\n\n\n@app.produces(topic=\"predictions\", brokers=kafka_brokers_2)\nasync def to_predictions_2(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction to s2: {prediction}\")\n    return [prediction]\n```\n\nHere’s what you need to know about producing to multiple clusters:\n\n1.  We define two Kafka broker configurations: **kafka_brokers_1** and\n    **kafka_brokers_2**, representing different clusters with their\n    respective connection details.\n\n2.  We create an instance of the FastKafka application, specifying\n    **kafka_brokers_1** as the primary cluster for producing messages.\n\n3.  The `on_preprocessed_signals` function serves as a consumer,\n    handling incoming messages from the “preprocessed_signals\" topic.\n    Within this function, we invoke two producer functions:\n    `to_predictions_1` and `to_predictions_2`.\n\n4.  The `to_predictions_1` function sends predictions to the\n    “predictions\" topic on *kafka_brokers_1* cluster.\n\n5.  
Additionally, the `to_predictions_2` function sends the same\n    predictions to the “predictions\" topic on *kafka_brokers_2* cluster.\n    This allows for producing the same data to multiple clusters\n    simultaneously.\n\nBy utilizing this approach, you can seamlessly produce messages to\nmultiple Kafka clusters, enabling you to distribute data across\ndifferent environments or leverage the strengths of various clusters.\n\nFeel free to customize the producer functions as per your requirements,\nperforming any necessary data transformations or enrichment before\nsending the predictions.\n\nWith FastKafka, producing to multiple clusters becomes a breeze,\nempowering you to harness the capabilities of multiple environments\neffortlessly.\n\n#### Testing\n\nLet’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.to_preprocessed_signals(TestMsg(msg=\"signal\"))\n    await tester.mirrors[to_predictions_1].assert_called(timeout=5)\n    await tester.mirrors[to_predictions_2].assert_called(timeout=5)\n```\n\n    23-05-30 10:34:00.033 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:34:00.034 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:34:00.035 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:34:00.036 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:34:00.037 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:34:00.038 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-05-30 10:34:00.038 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched 
start() called()\n    23-05-30 10:34:00.052 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:34:00.053 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:34:00.054 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:34:00.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:34:00.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:34:00.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:34:00.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:34:00.057 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:34:00.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:34:00.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:34:00.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:34:00.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() 
called\n    23-05-30 10:34:00.062 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:34:00.062 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-05-30 10:34:00.064 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:34:00.064 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:34:00.065 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    msg=TestMsg(msg='signal')\n    Sending prediction to s1: msg='prediction'\n    Sending prediction to s2: msg='prediction'\n    23-05-30 10:34:04.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:34:04.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:34:04.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:34:04.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:34:04.057 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:34:04.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:34:04.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:34:04.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nHere’s how you can perform the necessary tests:\n\n1.  Within an async with block, create an instance of the **Tester** by\n    passing in your app object, representing your FastKafka application.\n\n2.  Using the `tester.to_preprocessed_signals` method, you can send a\n    TestMsg message with the content “signal\".\n\n3.  After sending the message, you can perform assertions on the\n    to_predictions_1 and to_predictions_2 functions using\n    `tester.mirrors[to_predictions_1].assert_called(timeout=5)` and\n    `tester.mirrors[to_predictions_2].assert_called(timeout=5)`. These\n    assertions ensure that the respective producer functions have\n    produced data to their respective topic/broker combinations.\n\nBy employing this testing approach, you can verify that the producing\nfunctions correctly send messages to their respective clusters. The\ntesting framework provided by FastKafka enables you to ensure the\naccuracy and reliability of your application’s producing logic.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",
    "content": "# Deploying FastKafka using Docker\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Building a Docker Image\n\nTo build a Docker image for a FastKafka project, we need the following\nitems:\n\n1.  A library that is built using FastKafka.\n2.  A file in which the requirements are specified. This could be a\n    requirements.txt file, a setup.py file, or even a wheel file.\n3.  A Dockerfile to build an image that will include the two files\n    mentioned above.\n\n### Creating FastKafka Code\n\nLet’s create a `FastKafka`-based application and write it to the\n`application.py` file based on the [tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n       
 \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Creating requirements.txt file\n\nThe above code only requires `fastkafka`. So, we will add only\n`fastkafka` to the `requirements.txt` file, but you can add additional\nrequirements to it as well.\n\n``` txt\nfastkafka>=0.3.0\n```\n\nHere we are using `requirements.txt` to store the project’s\ndependencies. However, other methods like `setup.py`, `pipenv`, and\n`wheel` files can also be used. `setup.py` is commonly used for\npackaging and distributing Python modules, while `pipenv` is a tool used\nfor managing virtual environments and package dependencies. 
`wheel`\nfiles are built distributions of Python packages that can be installed\nwith pip.\n\n### Creating Dockerfile\n\n``` dockerfile\n# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD [\"fastkafka\", \"run\", \"--num-workers\", \"2\", \"--kafka-broker\", \"production\", \"application:kafka_app\"]\n```\n\n1.  Start from the official Python base image.\n\n2.  Set the current working directory to `/project`.\n\n    This is where we’ll put the `requirements.txt` file and the\n    `application.py` file.\n\n3.  Copy the `application.py` file and `requirements.txt` file inside\n    the `/project` directory.\n\n4.  Install the package dependencies in the requirements file.\n\n    The `--no-cache-dir` option tells `pip` to not save the downloaded\n    packages locally, as that is only if `pip` was going to be run again\n    to install the same packages, but that’s not the case when working\n    with containers.\n\n    The `--upgrade` option tells `pip` to upgrade the packages if they\n    are already installed.\n\n5.  Set the **command** to run the `fastkafka run` command.\n\n    `CMD` takes a list of strings, each of these strings is what you\n    would type in the command line separated by spaces.\n\n    This command will be run from the **current working directory**, the\n    same `/project` directory you set above with `WORKDIR /project`.\n\n    We supply additional parameters `--num-workers` and `--kafka-broker`\n    for the run command. Finally, we specify the location of our\n    `fastkafka` application location as a command argument.\n\n    To learn more about `fastkafka run` command please check the [CLI\n    docs](../../cli/fastkafka/#fastkafka-run).\n\n### Build the Docker Image\n\nNow that all the files are in place, let’s build the container image.\n\n1.  
Go to the project directory (where your `Dockerfile` is, containing\n    your `application.py` file).\n\n2.  Run the following command to build the image:\n\n    ``` cmd\n    docker build -t fastkafka_project_image .\n    ```\n\n    This command will create a docker image with the name\n    `fastkafka_project_image` and the `latest` tag.\n\nThat’s it! You have now built a docker image for your FastKafka project.\n\n### Start the Docker Container\n\nRun a container based on the built image:\n\n``` cmd\ndocker run -d --name fastkafka_project_container fastkafka_project_image\n```\n\n## Additional Security\n\n`Trivy` is an open-source tool that scans Docker images for\nvulnerabilities. It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here’s\nhow you can use `trivy` to scan your `fastkafka_project_image`:\n\n1.  Install `trivy` on your local machine by following the instructions\n    provided in the [official `trivy`\n    documentation](https://aquasecurity.github.io/trivy/latest/getting-started/installation/).\n\n2.  Run the following command to scan your fastkafka_project_image:\n\n    ``` cmd\n    trivy image fastkafka_project_image\n    ```\n\n    This command will scan your `fastkafka_project_image` for any\n    vulnerabilities and provide you with a report of its findings.\n\n3.  Fix any vulnerabilities identified by `trivy`. You can do this by\n    updating the vulnerable package to a more secure version or by using\n    a different package altogether.\n\n4.  Rebuild your `fastkafka_project_image` and repeat steps 2 and 3\n    until `trivy` reports no vulnerabilities.\n\nBy using `trivy` to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities.\n\n## Example repo\n\nA `FastKafka` based library which uses above mentioned Dockerfile to\nbuild a docker image can be found\n[here](https://github.com/airtai/sample_fastkafka_project/)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",
    "content": "# Using Redpanda to test FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## What is FastKafka?\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## What is Redpanda?\n\nRedpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda.\n\nFrom [redpanda.com](https://redpanda.com/):\n\n> Redpanda is a Kafka®-compatible streaming data platform that is proven\n> to be 10x faster and 6x lower in total costs. It is also JVM-free,\n> ZooKeeper®-free, Jepsen-tested and source available.\n\nSome of the advantages of Redpanda over Kafka are\n\n1.  A single binary with built-in everything, no ZooKeeper® or JVM\n    needed.\n2.  Costs upto 6X less than Kafka.\n3.  
Up to 10x lower average latencies and up to 6x faster Kafka\n    transactions without compromising correctness.\n\nTo learn more about Redpanda, please visit their\n[website](https://redpanda.com/) or checkout this [blog\npost](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\ncomparing Redpanda and Kafka’s performance benchmarks.\n\n## Example repo\n\nA sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n[here](https://github.com/airtai/sample_fastkafka_with_redpanda).\n\n## The process\n\nHere are the steps we’ll be walking through to build our example:\n\n1.  Set up the prerequisites.\n2.  Clone the example repo.\n3.  Explain how to write an application using FastKafka.\n4.  Explain how to write a test case to test FastKafka with Redpanda.\n5.  Run the test case and produce/consume messages.\n\n## 1. Prerequisites\n\nBefore starting, make sure you have the following prerequisites set up:\n\n1.  **Python 3.x**: A Python 3.x installation is required to run\n    FastKafka. You can download the latest version of Python from the\n    [official website](https://www.python.org/downloads/). You’ll also\n    need to have pip installed and updated, which is Python’s package\n    installer.\n2.  **Docker Desktop**: Docker is used to run Redpanda, which is\n    required for testing FastKafka. You can download and install Docker\n    Desktop from the [official\n    website](https://www.docker.com/products/docker-desktop/).\n3.  **Git**: You’ll need to have Git installed to clone the example\n    repo. You can download Git from the [official\n    website](https://git-scm.com/downloads).\n\n## 2. 
Cloning and setting up the example repo\n\nTo get started with the example code, clone the [GitHub\nrepository](https://github.com/airtai/sample_fastkafka_with_redpanda) by\nrunning the following command in your terminal:\n\n``` cmd\ngit clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n```\n\nThis will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files.\n\n### Create a virtual environment\n\nBefore writing any code, let’s [create a new virtual\nenvironment](https://docs.python.org/3/library/venv.html#module-venv)\nfor our project.\n\nA virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between different projects.\n\nTo create a new virtual environment, run the following commands in your\nterminal:\n\n``` cmd\npython3 -m venv venv\n```\n\nThis will create a new directory called `venv` in your project\ndirectory, which will contain the virtual environment.\n\nTo activate the virtual environment, run the following command:\n\n``` cmd\nsource venv/bin/activate\n```\n\nThis will change your shell’s prompt to indicate that you are now\nworking inside the virtual environment.\n\nFinally, run the following command to upgrade `pip`, the Python package\ninstaller:\n\n``` cmd\npip install --upgrade pip\n```\n\n### Install Python dependencies\n\nNext, let’s install the required Python dependencies. In this guide,\nwe’ll be using `FastKafka` to write our application code and `pytest`\nand `pytest-asyncio` to test it.\n\nYou can install the dependencies from the `requirements.txt` file\nprovided in the cloned repository by running:\n\n``` cmd\npip install -r requirements.txt\n```\n\nThis will install all the required packages and their dependencies.\n\n## 3. 
Writing server code\n\nThe `application.py` file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. 
It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker.\n\nNext, an instance of the `FastKafka` class is initialized with the\nminimum required arguments:\n\n- `kafka_brokers`: a dictionary used for generating documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. 
Specifying the\n  type of the single argument is instructing the Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible strign values predicted by the mdoel. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## 4. Writing the test code\n\nThe service can be tested using the `Tester` instance which can be\nconfigured to start a [Redpanda\nbroker](../../api/fastkafka/testing/LocalRedpandaBroker/) for testing\npurposes. 
The `test.py` file in the cloned repository contains the\nfollowing code for testing.\n\n``` python\nimport pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n    # Start Tester app and create local Redpanda broker for testing\n    async with Tester(kafka_app).using_local_redpanda(\n        tag=\"v23.1.2\", listener_port=9092\n    ) as tester:\n        # Send IrisInputData message to input_data topic\n        await tester.to_input_data(msg)\n\n        # Assert that the kafka_app responded with IrisPrediction in predictions topic\n        await tester.awaited_mocks.on_predictions.assert_awaited_with(\n            IrisPrediction(species=\"setosa\"), timeout=2\n        )\n```\n\nThe `Tester` module utilizes uses `LocalRedpandaBroker` to start and\nstop a Redpanda broker for testing purposes using Docker\n\n## 5. Running the tests\n\nWe can run the tests which is in `test.py` file by executing the\nfollowing command:\n\n``` cmd\npytest test.py\n```\n\nThis will start a Redpanda broker using Docker and executes tests. The\noutput of the command is:\n\n``` cmd\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item                                                                 \n\ntest.py .                                                                  
[100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n```\n\nRunning the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable.\n\n### Recap\n\nWe have created an Iris classification model and encapulated it into our\n`FastKafka` application. The app will consume the `IrisInputData` from\nthe `input_data` topic and produce the predictions to `predictions`\ntopic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our `Tester` class with `Redpanda` broker which mirrors the\n    developed app topics for testing purposes\n\n3.  Sent `IrisInputData` message to `input_data` topic\n\n4.  Asserted and checked that the developed iris classification service\n    has reacted to `IrisInputData` message\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",
    "content": "# Using FastAPI to Run FastKafka Application\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nWhen deploying a FastKafka application, the default approach is to\nutilize the [`fastkafka run`](/docs/cli/fastkafka#fastkafka-run) CLI\ncommand. This command allows you to launch your FastKafka application as\na standalone service. However, if you already have a FastAPI application\nin place and wish to run FastKafka application alongside it, you have an\nalternative option.\n\nFastKafka provides a method called `FastKafka.fastapi_lifespan` that\nleverages [FastAPI’s\nlifespan](https://fastapi.tiangolo.com/advanced/events/#lifespan-events)\nfeature. This method allows you to run your FastKafka application\ntogether with your existing FastAPI app, seamlessly integrating their\nfunctionalities. By using the `FastKafka.fastapi_lifespan` method, you\ncan start the FastKafka application within the same process as the\nFastAPI app.\n\nThe `FastKafka.fastapi_lifespan` method ensures that both FastAPI and\nFastKafka are initialized and start working simultaneously. This\napproach enables the execution of Kafka-related tasks, such as producing\nand consuming messages, while also handling HTTP requests through\nFastAPI’s routes.\n\nBy combining FastAPI and FastKafka in this manner, you can build a\ncomprehensive application that harnesses the power of both frameworks.\nWhether you require real-time messaging capabilities or traditional HTTP\nendpoints, this approach allows you to leverage the strengths of FastAPI\nand FastKafka within a single deployment setup.\n\n## Prerequisites\n\n1.  A basic knowledge of `FastKafka` is needed to proceed with this\n    guide. If you are not familiar with `FastKafka`, please go through\n    the [tutorial](/docs#tutorial) first.\n2.  
`FastKafka` and `FastAPI` libraries need to be installed.
The `FastKafka.fastapi_lifespan`\nmethod is a feature provided by FastKafka, which allows you to\nseamlessly integrate a FastKafka application with a FastAPI application\nby leveraging FastAPI’s lifespan feature.\n\n``` python\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name=\"localhost\"))\n\n\n@fastapi_app.get(\"/hello\")\nasync def hello():\n    return {\"msg\": \"hello there\"}\n```\n\nIn the above example, a new instance of the `FastAPI` app is created,\nand when the app is started using uvicorn, it also runs the `FastKafka`\napplication concurrently.\n\n## Putting it all together\n\nLet’s put the above code together and write it in a file called\n`fast_apps.py`.\n\n``` python\n# content of the \"fast_apps.py\" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Greetings\",\n    kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n    await to_greetings(TestMsg(msg=f\"Hello {msg.msg}\"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n    return greeting\n\n\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(\"localhost\"))\n\n@fastapi_app.get(\"/hello\")\nasync def hello():\n    return {\"msg\": \"hello there\"}\n```\n\nFinally, you can run the FastAPI application using a web server of your\nchoice, such as Uvicorn or 
Hypercorn by running the below command:\n\n``` cmd\nuvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\n```\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/index.md",
    "content": "# FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n<b>Effortless Kafka integration for your web services</b>\n\n------------------------------------------------------------------------\n\n![PyPI](https://img.shields.io/pypi/v/fastkafka.png) ![PyPI -\nDownloads](https://img.shields.io/pypi/dm/fastkafka.png) ![PyPI - Python\nVersion](https://img.shields.io/pypi/pyversions/fastkafka.png)\n\n![GitHub Workflow\nStatus](https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml)\n![CodeQL](https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg)\n![Dependency\nReview](https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg)\n\n![GitHub](https://img.shields.io/github/license/airtai/fastkafka.png)\n\n------------------------------------------------------------------------\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n------------------------------------------------------------------------\n\n#### ⭐⭐⭐ Stay in touch ⭐⭐⭐\n\nPlease show your support and stay in touch by:\n\n- giving our [GitHub repository](https://github.com/airtai/fastkafka/) a\n  star, and\n\n- joining our [Discord server](https://discord.gg/CJWmYpyFbc).\n\nYour support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. Thank you for your\nsupport!\n\n------------------------------------------------------------------------\n\n#### 🐝🐝🐝 We were busy lately 🐝🐝🐝\n\n![Activity](https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg \"Repobeats analytics image\")\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of `fastkafka` with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\nTo install fastkafka with testing features please use:\n\n``` sh\npip install fastkafka[test]\n```\n\nTo install fastkafka with asyncapi docs please use:\n\n``` sh\npip install fastkafka[docs]\n```\n\nTo install fastkafka with all the features please use:\n\n``` sh\npip install fastkafka[test,docs]\n```\n\n## Tutorial\n\nYou can start an interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb\" target=\"_blank\">\n<img 
src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open in Colab\" />\n</a>\n\n## Writing server code\n\nTo demonstrate FastKafka simplicity of using `@produces` and `@consumes`\ndecorators, we will focus on a simple app.\n\nThe app will consume jsons containing positive floats from one topic, log\nthem and then produce incremented values to another topic.\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines one `Data` message class. This class will model the\nconsumed and produced data in our app demo, it contains one\n`NonNegativeFloat` field `data` that will be logged and “processed\"\nbefore being produced to another topic.\n\nThis message class will be used to parse and validate incoming data in\nKafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka brokers.\n\nNext, an object of the `FastKafka` class is initialized with the minimum\nset of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\nWe will also import and create a logger so that we can log the incoming\ndata in our consuming function.\n\n``` python\nfrom logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger(\"Demo Kafka app\")\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encoded messages\n\nfrom user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThe following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `Data` message class. Specifying the type of the\n  single argument is instructing Pydantic to use `Data.parse_raw()`\n  on the consumed message before passing it to the user defined function\n  `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_output_data` function,\n  which specifies that this function should produce a message to the\n  “output_data\" Kafka topic whenever it is called. The `to_output_data`\n  function takes a single float argument `data`. It increments the\n  data and returns it wrapped in a `Data` object. 
The framework will call\n  the `Data.json().encode(\"utf-8\")` function on the returned value and\n  produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\n## Testing the service\n\nThe service can be tested using the `Tester` instances which internally\nstarts InMemory implementation of Kafka broker.\n\nThe Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies.\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = Data(\n    data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send Data message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with incremented data in output_data topic\n    await tester.awaited_mocks.on_output_data.assert_awaited_with(\n        Data(data=1.1), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] Demo Kafka app: Got data: 0.1\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created a simple fastkafka application. The app will consume the\n`Data` from the `input_data` topic, log it and produce the incremented\ndata to `output_data` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent Data message to `input_data` topic\n\n4.  
Asserted and checked that the developed service has reacted to Data\n    message\n\n## Running the service\n\nThe service can be started using builtin fastkafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\nTo run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp symbol to the command.\n\n``` sh\nfastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n```\n\nAfter running the command, you should see the following output in your\ncommand line:\n\n    [1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting 
bootstrap_servers value to 'localhost:9092'\n    [1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1506]: 23-05-31 11:36:46.042 [INFO] 
aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n    [1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n    [1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n    23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nAsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programmatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n    23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\nThis will generate the *asyncapi* folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxr-xr-x 4 root root 4096 May 31 11:38 docs\n    drwxr-xr-x 2 root root 4096 May 31 11:38 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find an asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET / HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/global.min.css HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /js/asyncapi-ui.min.js HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/asyncapi.min.css HTTP/1.1\" 200 -\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation below:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    
kafka_brokers=kafka_brokers,\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippets are for the consumer as specified in\nthe code above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippets are for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n\n## License\n\nFastKafka is licensed under the Apache License 2.0\n\nA permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code.\n\nThe full text of the license can be found\n[here](https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/overrides/css/extra.css",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/overrides/js/extra.js",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/overrides/js/math.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.0/overrides/js/mathjax.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/CHANGELOG.md",
    "content": "# Release notes\n\n<!-- do not remove -->\n\n## 0.7.0\n\n### New Features\n\n- Optional description argument to consumes and produces decorator implemented ([#338](https://github.com/airtai/fastkafka/pull/338)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumes and produces decorators now have optional `description` argument that is used instead of function docstring in async doc generation when specified\n\n- FastKafka Windows OS support enabled ([#326](https://github.com/airtai/fastkafka/pull/326)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now run on Windows\n\n- FastKafka and FastAPI integration implemented ([#304](https://github.com/airtai/fastkafka/pull/304)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now be run alongside FastAPI\n\n- Batch consuming option to consumers implemented ([#298](https://github.com/airtai/fastkafka/pull/298)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumers can consume events in batches by specifying msg type of consuming function as `List[YourMsgType]` \n\n- Removed support for synchronous produce functions ([#295](https://github.com/airtai/fastkafka/pull/295)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Added default broker values and update docs ([#292](https://github.com/airtai/fastkafka/pull/292)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n### Bugs Squashed\n\n- Fix index.ipynb to be runnable in colab ([#342](https://github.com/airtai/fastkafka/issues/342))\n\n- Use cli option root_path docs generate and serve CLI commands ([#341](https://github.com/airtai/fastkafka/pull/341)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Fix incorrect asyncapi docs path on fastkafka docs serve command ([#335](https://github.com/airtai/fastkafka/pull/335)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Serve docs now takes app `root_path` argument into consideration when specified 
in app\n\n- Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps ([#315](https://github.com/airtai/fastkafka/issues/315))\n\n- Fix logs printing timestamps ([#308](https://github.com/airtai/fastkafka/issues/308))\n\n- Fix topics with dots causing failure of tester instantiation ([#306](https://github.com/airtai/fastkafka/pull/306)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Specified topics can now have \".\" in their names\n\n## 0.6.0\n\n### New Features\n\n- Timestamps added to CLI commands ([#283](https://github.com/airtai/fastkafka/pull/283)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Added option to process messages concurrently ([#278](https://github.com/airtai/fastkafka/pull/278)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - A new `executor` option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.\n\n- Add consumes and produces functions to app ([#274](https://github.com/airtai/fastkafka/pull/274)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Add batching for producers ([#273](https://github.com/airtai/fastkafka/pull/273)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(batch): batch support is a real need! and i see it on the issue list.... 
so hope we do not need to wait too long\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Fix broken links in guides ([#272](https://github.com/airtai/fastkafka/pull/272)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Generate the docusaurus sidebar dynamically by parsing summary.md ([#270](https://github.com/airtai/fastkafka/pull/270)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Metadata passed to consumer ([#269](https://github.com/airtai/fastkafka/pull/269)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(key): read the key value somehow..Maybe I missed something in the docs\n    requirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD operations.\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Contribution with instructions how to build and test added ([#255](https://github.com/airtai/fastkafka/pull/255)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Export encoders, decoders from fastkafka.encoder ([#246](https://github.com/airtai/fastkafka/pull/246)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n- Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. 
([#239](https://github.com/airtai/fastkafka/issues/239))\n\n\n- UI Improvement: Post screenshots with links to the actual messages in testimonials section ([#228](https://github.com/airtai/fastkafka/issues/228))\n\n### Bugs Squashed\n\n- Batch testing fix ([#280](https://github.com/airtai/fastkafka/pull/280)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Tester breaks when using Batching or KafkaEvent producers ([#279](https://github.com/airtai/fastkafka/issues/279))\n\n- Consumer loop callbacks are not executing in parallel ([#276](https://github.com/airtai/fastkafka/issues/276))\n\n\n## 0.5.0\n\n### New Features\n\n- Significant speedup of Kafka producer ([#236](https://github.com/airtai/fastkafka/pull/236)), thanks to [@Sternakt](https://github.com/Sternakt)\n \n\n- Added support for AVRO encoding/decoding ([#231](https://github.com/airtai/fastkafka/pull/231)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n### Bugs Squashed\n\n- Fixed sidebar to include guides in docusaurus documentation ([#238](https://github.com/airtai/fastkafka/pull/238)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Fixed link to symbols in docusaurus docs ([#227](https://github.com/airtai/fastkafka/pull/227)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Removed bootstrap servers from constructor ([#220](https://github.com/airtai/fastkafka/pull/220)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n## 0.4.0\n\n### New Features\n\n- Integrate fastkafka chat ([#208](https://github.com/airtai/fastkafka/pull/208)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add benchmarking ([#206](https://github.com/airtai/fastkafka/pull/206)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Enable fast testing without running kafka locally ([#198](https://github.com/airtai/fastkafka/pull/198)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Generate docs using Docusaurus 
([#194](https://github.com/airtai/fastkafka/pull/194)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add test cases for LocalRedpandaBroker ([#189](https://github.com/airtai/fastkafka/pull/189)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Reimplement patch and delegates from fastcore ([#188](https://github.com/airtai/fastkafka/pull/188)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Rename existing functions into start and stop and add lifespan handler ([#117](https://github.com/airtai/fastkafka/issues/117))\n  - https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios\n\n\n## 0.3.1\n\n-  README.md file updated\n\n\n## 0.3.0\n\n### New Features\n\n- Guide for fastkafka produces using partition key ([#172](https://github.com/airtai/fastkafka/pull/172)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #161\n\n- Add support for Redpanda for testing and deployment ([#181](https://github.com/airtai/fastkafka/pull/181)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Remove bootstrap_servers from __init__ and use the name of broker as an option when running/testing ([#134](https://github.com/airtai/fastkafka/issues/134))\n\n- Add a GH action file to check for broken links in the docs ([#163](https://github.com/airtai/fastkafka/issues/163))\n\n- Optimize requirements for testing and docs ([#151](https://github.com/airtai/fastkafka/issues/151))\n\n- Break requirements into base and optional for testing and dev ([#124](https://github.com/airtai/fastkafka/issues/124))\n  - Minimize base requirements needed just for running the service.\n\n- Add link to example git repo into guide for building docs using actions ([#81](https://github.com/airtai/fastkafka/issues/81))\n\n- Add logging for run_in_background ([#46](https://github.com/airtai/fastkafka/issues/46))\n\n- Implement partition Key mechanism for producers 
([#16](https://github.com/airtai/fastkafka/issues/16))\n\n### Bugs Squashed\n\n- Implement checks for npm installation and version ([#176](https://github.com/airtai/fastkafka/pull/176)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #158 by checking if the npx is installed and more verbose error handling\n\n- Fix the helper.py link in CHANGELOG.md ([#165](https://github.com/airtai/fastkafka/issues/165))\n\n- fastkafka docs install_deps fails ([#157](https://github.com/airtai/fastkafka/issues/157))\n  - Unexpected internal error: [Errno 2] No such file or directory: 'npx'\n\n- Broken links in docs ([#141](https://github.com/airtai/fastkafka/issues/141))\n\n- fastkafka run is not showing up in CLI docs ([#132](https://github.com/airtai/fastkafka/issues/132))\n\n\n## 0.2.3\n\n- Fixed broken links on PyPi index page\n\n\n## 0.2.2\n\n### New Features\n\n- Extract JDK and Kafka installation out of LocalKafkaBroker ([#131](https://github.com/airtai/fastkafka/issues/131))\n\n- PyYAML version relaxed ([#119](https://github.com/airtai/fastkafka/pull/119)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Replace docker based kafka with local ([#68](https://github.com/airtai/fastkafka/issues/68))\n  - [x] replace docker compose with a simple docker run (standard run_jupyter.sh should do)\n  - [x] replace all tests to use LocalKafkaBroker\n  - [x] update documentation\n\n### Bugs Squashed\n\n- Fix broken link for FastKafka docs in index notebook ([#145](https://github.com/airtai/fastkafka/issues/145))\n\n- Fix encoding issues when loading setup.py on windows OS ([#135](https://github.com/airtai/fastkafka/issues/135))\n\n\n## 0.2.0\n\n### New Features\n\n- Replace kafka container with LocalKafkaBroker ([#112](https://github.com/airtai/fastkafka/issues/112))\n  - - [x] Replace kafka container with LocalKafkaBroker in tests\n- [x] Remove kafka container from tests environment\n- [x] Fix failing tests\n\n### Bugs Squashed\n\n- Fix random failing in CI 
([#109](https://github.com/airtai/fastkafka/issues/109))\n\n\n## 0.1.3\n\n- version update in __init__.py\n\n\n## 0.1.2\n\n### New Features\n\n\n- Git workflow action for publishing Kafka docs ([#78](https://github.com/airtai/fastkafka/issues/78))\n\n\n### Bugs Squashed\n\n- Include missing requirement ([#110](https://github.com/airtai/fastkafka/issues/110))\n  - [x] Typer is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py) but it is not included in [settings.ini](https://github.com/airtai/fastkafka/blob/main/settings.ini)\n  - [x] Add aiohttp which is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py)\n  - [x] Add nbformat which is imported in _components/helpers.py\n  - [x] Add nbconvert which is imported in _components/helpers.py\n\n\n## 0.1.1\n\n\n### Bugs Squashed\n\n- JDK install fails on Python 3.8 ([#106](https://github.com/airtai/fastkafka/issues/106))\n\n\n\n## 0.1.0\n\nInitial release\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/CNAME",
    "content": "fastkafka.airt.ai\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/CONTRIBUTING.md",
    "content": "# Contributing to fastkafka\n\nFirst off, thanks for taking the time to contribute! ❤️\n\nAll types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉\n\n> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:\n> - Star the project\n> - Tweet about it\n> - Refer this project in your project's readme\n> - Mention the project at local meetups and tell your friends/colleagues\n\n## Table of Contents\n\n- [I Have a Question](#i-have-a-question)\n- [I Want To Contribute](#i-want-to-contribute)\n  - [Reporting Bugs](#reporting-bugs)\n  - [Suggesting Enhancements](#suggesting-enhancements)\n  - [Your First Code Contribution](#your-first-code-contribution)\n- [Development](#development)\n    - [Prepare the dev environment](#prepare-the-dev-environment)\n    - [Way of working](#way-of-working)\n    - [Before a PR](#before-a-pr)\n- [Join The Project Team](#join-the-project-team)\n\n\n\n## I Have a Question\n\n> If you want to ask a question, we assume that you have read the available [Documentation](https://fastkafka.airt.ai/docs).\n\nBefore you ask a question, it is best to search for existing [Issues](https://github.com/airtai/fastkafka/issues) that might help you. 
In case you have found a suitable issue and still need clarification, you can write your question in this issue.\n\nIf you then still feel the need to ask a question and need clarification, we recommend the following:\n\n- Contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new)\n    - Provide as much context as you can about what you're running into\n\nWe will then take care of the issue as soon as possible.\n\n## I Want To Contribute\n\n> ### Legal Notice \n> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.\n\n### Reporting Bugs\n\n#### Before Submitting a Bug Report\n\nA good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.\n\n- Make sure that you are using the latest version.\n- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](https://fastkafka.airt.ai/docs). 
If you are looking for support, you might want to check [this section](#i-have-a-question)).\n- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/airtai/fastkafka/issues?q=label%3Abug).\n- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.\n- Collect information about the bug:\n  - Stack trace (Traceback)\n  - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)\n  - Python version\n  - Possibly your input and the output\n  - Can you reliably reproduce the issue? And can you also reproduce it with older versions?\n\n#### How Do I Submit a Good Bug Report?\n\nWe use GitHub issues to track bugs and errors. If you run into an issue with the project:\n\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)\n- Explain the behavior you would expect and the actual behavior.\n- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.\n- Provide the information you collected in the previous section.\n\nOnce it's filed:\n\n- The project team will label the issue accordingly.\n- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. 
Bugs with the `needs-repro` tag will not be addressed until they are reproduced.\n- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented.\n\n### Suggesting Enhancements\n\nThis section guides you through submitting an enhancement suggestion for fastkafka, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.\n\n#### Before Submitting an Enhancement\n\n- Make sure that you are using the latest version.\n- Read the [documentation](https://fastkafka.airt.ai/docs) carefully and find out if the functionality is already covered, maybe by an individual configuration.\n- Perform a [search](https://github.com/airtai/fastkafka/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.\n- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library.\n- If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n\n#### How Do I Submit a Good Enhancement Suggestion?\n\nEnhancement suggestions are tracked as [GitHub issues](https://github.com/airtai/fastkafka/issues).\n\n- Use a **clear and descriptive title** for the issue to identify the suggestion.\n- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.\n- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.\n- **Explain why this enhancement would be useful** to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.\n\n### Your First Code Contribution\n\nA great way to start contributing to FastKafka would be by solving an issue tagged with \"good first issue\". To find a list of issues that are tagged as \"good first issue\" and are suitable for newcomers, please visit the following link: [Good first issues](https://github.com/airtai/fastkafka/labels/good%20first%20issue)\n\nThese issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in [Way of working](#way-of-working) and [Before a PR](#before-a-pr), and help us make FastKafka even better!\n\nIf you have any questions or need further assistance, feel free to reach out to us. 
Happy coding!\n\n## Development\n\n### Prepare the dev environment\n\nTo start contributing to fastkafka, you first have to prepare the development environment.\n\n#### Clone the fastkafka repository\n\nTo clone the repository, run the following command in the CLI:\n\n```shell\ngit clone https://github.com/airtai/fastkafka.git\n```\n\n#### Optional: create a virtual python environment\n\nTo prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your fastkafka project by running:\n\n```shell\npython3 -m venv fastkafka-env\n```\n\nAnd to activate your virtual environment run:\n\n```shell\nsource fastkafka-env/bin/activate\n```\n\nTo learn more about virtual environments, please have a look at [official python documentation](https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available.)\n\n#### Install fastkafka\n\nTo install fastkafka, navigate to the root directory of the cloned fastkafka project and run:\n\n```shell\npip install -e \".[dev]\"\n```\n\n#### Install JRE and Kafka toolkit\n\nTo be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:\n\n1. Use our `fastkafka testing install-deps` CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR\n2. Install JRE and Kafka manually.\n   To do this, please refer to [JDK and JRE installation guide](https://docs.oracle.com/javase/9/install/toc.htm) and [Apache Kafka quickstart](https://kafka.apache.org/quickstart)\n   \n#### Install npm\n\nTo be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:\n\n1. Use our `fastkafka docs install_deps` CLI command which will install npm for you in your .local folder\nOR\n2. 
Install npm manually.\n   To do this, please refer to [NPM installation guide](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)\n   \n#### Install docusaurus\n\nTo generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project.\n\n#### Check if everything works\n\nAfter installing fastkafka and all the necessary dependencies, run `nbdev_test` in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everything is set up correctly, you will get a \"Success.\" message in your terminal, otherwise please refer to previous steps.\n\n### Way of working\n\nThe development of fastkafka is done in Jupyter notebooks. Inside the `nbs` directory you will find all the source code of fastkafka, this is where you will implement your changes.\n\nThe testing, cleanup and exporting of the code is being handled by `nbdev`, please, before starting the work on fastkafka, get familiar with it by reading [nbdev documentation](https://nbdev.fast.ai/getting_started.html).\n\nThe general philosophy you should follow when writing code for fastkafka is:\n\n- Function should be an atomic functionality, short and concise\n   - Good rule of thumb: your function should be 5-10 lines long usually\n- If there are more than 2 params, enforce keywording using *\n   - E.g.: `def function(param1, *, param2, param3): ...`\n- Define typing of arguments and return value\n   - If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected\n- After the function cell, write test cells using the assert keyword\n   - Whenever you implement something you should test that functionality immediately in the cells below \n- Add Google style python docstrings when function is implemented and tested\n\n### Before a PR\n\nAfter you have implemented your changes you will want to open a pull request to merge those changes into our main 
branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):\n\n1. Format your notebooks: `nbqa black nbs`\n2. Close, shutdown, and clean the metadata from your notebooks: `nbdev_clean`\n3. Export your code: `nbdev_export`\n4. Run the tests: `nbdev_test`\n5. Test code typing: `mypy fastkafka`\n6. Test code safety with bandit: `bandit -r fastkafka`\n7. Test code safety with semgrep: `semgrep --config auto -r fastkafka`\n\nWhen you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.\n\n## Attribution\nThis guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/LICENSE.md",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/EventMetadata.md",
    "content": "## `fastkafka.EventMetadata` {#fastkafka.EventMetadata}\n\n\nA class for encapsulating Kafka record metadata.\n\n**Parameters**:\n- `topic`: The topic this record is received from\n- `partition`: The partition from which this record is received\n- `offset`: The position of this record in the corresponding Kafka partition\n- `timestamp`: The timestamp of this record\n- `timestamp_type`: The timestamp type of this record\n- `key`: The key (or `None` if no key is specified)\n- `value`: The value\n- `serialized_key_size`: The size of the serialized, uncompressed key in bytes\n- `serialized_value_size`: The size of the serialized, uncompressed value in bytes\n- `headers`: The headers\n\n### `create_event_metadata` {#create_event_metadata}\n\n`def create_event_metadata(record: aiokafka.structs.ConsumerRecord) -> EventMetadata`\n\nCreates an instance of EventMetadata from a ConsumerRecord.\n\n**Parameters**:\n- `record`: The Kafka ConsumerRecord.\n\n**Returns**:\n- The created EventMetadata instance.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/FastKafka.md",
    "content": "## `fastkafka.FastKafka` {#fastkafka.FastKafka}\n\n### `__init__` {#init}\n\n`def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Optional[Dict[str, Any]] = None, root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None`\n\nCreates FastKafka application\n\n**Parameters**:\n- `title`: optional title for the documentation. 
If None,\nthe title will be set to empty string\n- `description`: optional description for the documentation. If\nNone, the description will be set to empty string\n- `version`: optional version for the documentation. If None,\nthe version will be set to empty string\n- `contact`: optional contact for the documentation. If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'\n- `kafka_brokers`: dictionary describing kafka brokers used for setting\nthe bootstrap server when running the application and for\ngenerating documentation. Defaults to\n    {\n        \"localhost\": {\n            \"url\": \"localhost\",\n            \"description\": \"local kafka broker\",\n            \"port\": \"9092\",\n        }\n    }\n- `root_path`: path to where documentation will be created\n- `lifespan`: asynccontextmanager that is used for setting lifespan hooks.\n__aenter__ is called before app start and __aexit__ after app stop.\nThe lifespan is called when the application is started as async context\nmanager, e.g.:`async with kafka_app...`\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. 
If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,\n``SASL_SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence requires acks to be set to ``all``. If it is not\nexplicitly set by the user it will be chosen. 
If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. (See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. 
NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. Defaults ``None``, no limit.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. 
Default: True\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalancing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no heartbeats are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. 
The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.1', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specification and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function.\n- `executor`: Type of executor to choose for consuming tasks. Available options\nare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\n\"SequentialExecutor\" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n\"DynamicTaskExecutor\" which will wrap the consuming functions into tasks\nand run them on an asyncio loop in the background. 
This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the consuming function async docs.\nIf not provided, consuming function __doc__ attr will be used.\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). 
If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalancing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no heartbeats are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. 
Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,\n``SASL_SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_docs` {#create_docs}\n\n`def create_docs(self: fastkafka.FastKafka) -> None`\n\nCreate the asyncapi documentation based on the configured consumers and producers.\n\nThis function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. 
It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers.\n\nNote:\n    The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\n    attribute of the FastKafka instance.\n\n**Returns**:\n- None\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `fastapi_lifespan` {#fastapi_lifespan}\n\n`def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]`\n\nMethod for managing the lifespan of a FastAPI application with a specific Kafka broker.\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- Lifespan function to use for initializing FastAPI\n\n### `get_topics` {#get_topics}\n\n`def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]`\n\nGet all topics for both producing and consuming.\n\n**Returns**:\n- A set of topics for both producing and consuming.\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, 
enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the producing function async docs.\nIf not provided, producing function __doc__ attr will be used.\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. 
The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. 
However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. 
Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,\n``SASL_SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ``all``. If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. 
(See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the background.\n\n### `set_kafka_broker` {#set_kafka_broker}\n\n`def set_kafka_broker(self, kafka_broker_name: str) -> None`\n\nSets the Kafka broker to start FastKafka with\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- None\n\n**Exceptions**:\n- `ValueError`: If the provided kafka_broker_name is not found in dictionary of kafka_brokers\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/KafkaEvent.md",
    "content": "## `fastkafka.KafkaEvent` {#fastkafka.KafkaEvent}\n\n\nA generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel\n\n**Parameters**:\n- `message`: The message contained in the Kafka event, can be of type pydantic.BaseModel.\n- `key`: The optional key used to identify the Kafka event.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/encoder/AvroBase.md",
    "content": "## `fastkafka.encoder.AvroBase` {#fastkafka.encoder.AvroBase}\n\n\nThis is a base pydantic class that will add some methods\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/encoder/avro_decoder.md",
    "content": "## `fastkafka.encoder.avro_decoder` {#fastkafka.encoder.avro_decoder}\n\n### `avro_decoder` {#avro_decoder}\n\n`def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any`\n\nDecoder to decode avro encoded messages to pydantic model instance\n\n**Parameters**:\n- `raw_msg`: Avro encoded bytes message received from Kafka topic\n- `cls`: Pydantic class; This pydantic class will be used to construct instance of same class\n\n**Returns**:\n- An instance of given pydantic class\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/encoder/avro_encoder.md",
    "content": "## `fastkafka.encoder.avro_encoder` {#fastkafka.encoder.avro_encoder}\n\n### `avro_encoder` {#avro_encoder}\n\n`def avro_encoder(msg: pydantic.main.BaseModel) -> bytes`\n\nEncoder to encode pydantic instances to avro message\n\n**Parameters**:\n- `msg`: An instance of pydantic basemodel\n\n**Returns**:\n- A bytes message which is encoded from pydantic basemodel\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/encoder/avsc_to_pydantic.md",
    "content": "## `fastkafka.encoder.avsc_to_pydantic` {#fastkafka.encoder.avsc_to_pydantic}\n\n### `avsc_to_pydantic` {#avsc_to_pydantic}\n\n`def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass`\n\nGenerate pydantic model from given Avro Schema\n\n**Parameters**:\n- `schema`: Avro schema in dictionary format\n\n**Returns**:\n- Pydantic model class built from given avro schema\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/encoder/json_decoder.md",
    "content": "## `fastkafka.encoder.json_decoder` {#fastkafka.encoder.json_decoder}\n\n### `json_decoder` {#json_decoder}\n\n`def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any`\n\nDecoder to decode json string in bytes to pydantic model instance\n\n**Parameters**:\n- `raw_msg`: Bytes message received from Kafka topic\n- `cls`: Pydantic class; This pydantic class will be used to construct instance of same class\n\n**Returns**:\n- An instance of given pydantic class\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/encoder/json_encoder.md",
    "content": "## `fastkafka.encoder.json_encoder` {#fastkafka.encoder.json_encoder}\n\n### `json_encoder` {#json_encoder}\n\n`def json_encoder(msg: pydantic.main.BaseModel) -> bytes`\n\nEncoder to encode pydantic instances to json string\n\n**Parameters**:\n- `msg`: An instance of pydantic basemodel\n\n**Returns**:\n- Json string in bytes which is encoded from pydantic basemodel\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/executors/DynamicTaskExecutor.md",
    "content": "## `fastkafka.executors.DynamicTaskExecutor` {#fastkafka.executors.DynamicTaskExecutor}\n\n\nA class that implements a dynamic task executor for processing consumer records.\n\nThe DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running tasks in parallel using asyncio.Task.\n\n### `__init__` {#init}\n\n`def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None`\n\nCreate an instance of DynamicTaskExecutor\n\n**Parameters**:\n- `throw_exceptions`: Flag indicating whether exceptions should be thrown or logged.\nDefaults to False.\n- `max_buffer_size`: Maximum buffer size for the memory object stream.\nDefaults to 100_000.\n- `size`: Size of the task pool. Defaults to 100_000.\n\n### `run` {#run}\n\n`def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None`\n\nRuns the dynamic task executor.\n\n**Parameters**:\n- `is_shutting_down_f`: Function to check if the executor is shutting down.\n- `generator`: Generator function for retrieving consumer records.\n- `processor`: Processor function for processing consumer records.\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/executors/SequentialExecutor.md",
    "content": "## `fastkafka.executors.SequentialExecutor` {#fastkafka.executors.SequentialExecutor}\n\n\nA class that implements a sequential executor for processing consumer records.\n\nThe SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines.\n\n### `__init__` {#init}\n\n`def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None`\n\nCreate an instance of SequentialExecutor\n\n**Parameters**:\n- `throw_exceptions`: Flag indicating whether exceptions should be thrown or logged.\nDefaults to False.\n- `max_buffer_size`: Maximum buffer size for the memory object stream.\nDefaults to 100_000.\n\n### `run` {#run}\n\n`def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None`\n\nRuns the sequential executor.\n\n**Parameters**:\n- `is_shutting_down_f`: Function to check if the executor is shutting down.\n- `generator`: Generator function for retrieving consumer records.\n- `processor`: Processor function for processing consumer records.\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/testing/ApacheKafkaBroker.md",
    "content": "## `fastkafka.testing.ApacheKafkaBroker` {#fastkafka.testing.ApacheKafkaBroker}\n\n\nApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nInitialises the ApacheKafkaBroker object\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokers) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `get_service_config_string` {#get_service_config_string}\n\n`def get_service_config_string(self: fastkafka.testing.ApacheKafkaBroker, service: str, data_dir: pathlib.Path) -> str`\n\nGets the configuration string for a service.\n\n**Parameters**:\n- `service`: Name of the service (\"kafka\" or \"zookeeper\").\n- `data_dir`: Path to the directory where the service will save data.\n\n**Returns**:\n- The service configuration string.\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.ApacheKafkaBroker) -> str`\n\nStarts a local Kafka broker and ZooKeeper instance synchronously.\n\n**Returns**:\n- The Kafka broker bootstrap server address in string format: host:port.\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None`\n\nStops a local kafka broker and zookeeper instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/testing/LocalRedpandaBroker.md",
    "content": "## `fastkafka.testing.LocalRedpandaBroker` {#fastkafka.testing.LocalRedpandaBroker}\n\n\nLocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.\n\n### `__init__` {#init}\n\n`def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None`\n\nInitialises the LocalRedpandaBroker object\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use by Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log levels to use for Redpanda\n\n### `get_service_config_string` {#get_service_config_string}\n\n`def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str`\n\nGenerates a configuration for a service\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `service`: \"redpanda\", defines which service to get config string for\n\n### `start` {#start}\n\n`def start(self: fastkafka.testing.LocalRedpandaBroker) -> str`\n\nStarts a local redpanda broker instance synchronously\n\n**Returns**:\n- Redpanda broker bootstrap server address in string format: addr:port\n\n### `stop` {#stop}\n\n`def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None`\n\nStops a local redpanda broker instance synchronously\n\n**Returns**:\n- None\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/api/fastkafka/testing/Tester.md",
    "content": "## `fastkafka.testing.Tester` {#fastkafka.testing.Tester}\n\n### `__init__` {#init}\n\n`def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None`\n\nMirror-like object for testing a FastKafka application\n\nCan be used as context manager\n\n**Parameters**:\n- `app`: The FastKafka application to be tested.\n- `broker`: An optional broker to start and to use for testing.\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokers) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n### `benchmark` {#benchmark}\n\n`def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]`\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n- `interval`: Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second\n- `sliding_window_size`: The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated\n\n### `consumes` {#consumes}\n\n`def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.1', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]`\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix\n- `decoder`: Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function.\n- `executor`: Type of executor to choose for consuming tasks. Avaliable options\nare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\n\"SequentialExecutor\" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n\"DynamicTaskExecutor\" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. 
This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.\n- `prefix`: Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: \"on_\". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the consuming function async docs.\nIf not provided, consuming function __doc__ attr will be used.\n- `*topics`: optional list of topics to subscribe to. If not set,\ncall :meth:`.subscribe` or :meth:`.assign` before consuming records.\nPassing topics directly is same as calling :meth:`.subscribe` API.\n- `bootstrap_servers`: a ``host[:port]`` string (or list of\n``host[:port]`` strings) that the consumer should contact to bootstrap\ninitial cluster metadata.\n\nThis does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\nfor logging with respect to consumer group administration. Default:\n``aiokafka-{version}``\n- `group_id`: name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None\n- `key_deserializer`: Any callable that takes a\nraw message key and returns a deserialized key.\n- `value_deserializer`: Any callable that takes a\nraw message value and returns a deserialized value.\n- `fetch_min_bytes`: Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n`fetch_max_wait_ms` for more data to accumulate. Default: 1.\n- `fetch_max_bytes`: The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n- `fetch_max_wait_ms`: The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500.\n- `max_partition_fetch_bytes`: The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ``= #partitions * max_partition_fetch_bytes``.\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576.\n- `max_poll_records`: The maximum number of records returned in a\nsingle call to :meth:`.getmany`. 
Defaults ``None``, no limit.\n- `request_timeout_ms`: Client request timeout in milliseconds.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `auto_offset_reset`: A policy for resetting offsets on\n:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\navailable message, ``latest`` will move to the most recent, and\n``none`` will raise an exception so you can handle this case.\nDefault: ``latest``.\n- `enable_auto_commit`: If true the consumer's offset will be\nperiodically committed in the background. Default: True.\n- `auto_commit_interval_ms`: milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000.\n- `check_crcs`: Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `partition_assignment_strategy`: List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n- `max_poll_interval_ms`: Maximum allowed time between calls to\nconsume messages (e.g., :meth:`.getmany`). 
If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See `KIP-62`_ for more\ninformation. Default 300000\n- `rebalance_timeout_ms`: The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to `max.poll.interval.ms` configuration,\nbut as ``aiokafka`` will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\nto ``session_timeout_ms``\n- `session_timeout_ms`: Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(`heartbeat.interval.ms`) to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe **broker** configuration properties\n`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\nDefault: 10000\n- `heartbeat_interval_ms`: The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than `session_timeout_ms`, but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000\n- `consumer_timeout_ms`: maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. 
Default: 200\n- `api_version`: specify which kafka API version to use.\n:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,\n``SASL_SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more information see\n:ref:`ssl_auth`. Default: None.\n- `exclude_internal_topics`: Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying `None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `isolation_level`: Controls how to read messages written\ntransactionally.\n\nIf set to ``read_committed``, :meth:`.getmany` will only return\ntransactional messages which have been committed.\nIf set to ``read_uncommitted`` (the default), :meth:`.getmany` will\nreturn all messages, even transactional messages which have been\naborted.\n\nNon-transactional messages will be returned unconditionally in\neither mode.\n\nMessages will always be returned in offset order. Hence, in\n`read_committed` mode, :meth:`.getmany` will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. 
In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, `read_committed` consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in `read_committed` the seek_to_end method will\nreturn the LSO. See method docs below. Default: ``read_uncommitted``\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: None\n- `sasl_oauth_token_provider`: OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\nDefault: None\n\n**Returns**:\n- : A function returning the same function\n\n### `create_docs` {#create_docs}\n\n`def create_docs(self: fastkafka.FastKafka) -> None`\n\nCreate the asyncapi documentation based on the configured consumers and producers.\n\nThis function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. 
It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers.\n\nNote:\n    The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\n    attribute of the FastKafka instance.\n\n**Returns**:\n- None\n\n### `create_mocks` {#create_mocks}\n\n`def create_mocks(self: fastkafka.FastKafka) -> None`\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### `fastapi_lifespan` {#fastapi_lifespan}\n\n`def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]`\n\nMethod for managing the lifespan of a FastAPI application with a specific Kafka broker.\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- Lifespan function to use for initializing FastAPI\n\n### `get_topics` {#get_topics}\n\n`def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]`\n\nGet all topics for both producing and consuming.\n\n**Returns**:\n- A set of topics for both producing and consuming.\n\n### `produces` {#produces}\n\n`def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, 
enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]`\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n- `topic`: Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix.\n- `encoder`: Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function.\n- `prefix`: Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: \"to_\". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError\n- `brokers`: Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka.\n- `description`: Optional description of the producing function async docs.\nIf not provided, producing function __doc__ attr will be used.\n- `bootstrap_servers`: a ``host[:port]`` string or list of\n``host[:port]`` strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list.  It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ``localhost:9092``.\n- `client_id`: a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ``aiokafka-producer-#`` (appended with a unique number\nper instance)\n- `key_serializer`: used to convert user-supplied keys to bytes\nIf not :data:`None`, called as ``f(key),`` should return\n:class:`bytes`.\nDefault: :data:`None`.\n- `value_serializer`: used to convert user-supplied message\nvalues to :class:`bytes`. If not :data:`None`, called as\n``f(value)``, should return :class:`bytes`.\nDefault: :data:`None`.\n- `acks`: one of ``0``, ``1``, ``all``. The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:\n\n* ``0``: Producer will not wait for any acknowledgment from the server\n  at all. 
The message will immediately be added to the socket\n  buffer and considered sent. No guarantee can be made that the\n  server has received the record in this case, and the retries\n  configuration will not take effect (as the client won't\n  generally know of any failures). The offset given back for each\n  record will always be set to -1.\n* ``1``: The broker leader will write the record to its local log but\n  will respond without awaiting full acknowledgement from all\n  followers. In this case should the leader fail immediately\n  after acknowledging the record but before the followers have\n  replicated it then the record will be lost.\n* ``all``: The broker leader will wait for the full set of in-sync\n  replicas to acknowledge the record. This guarantees that the\n  record will not be lost as long as at least one in-sync replica\n  remains alive. This is the strongest available guarantee.\n\nIf unset, defaults to ``acks=1``. If `enable_idempotence` is\n:data:`True` defaults to ``acks=all``\n- `compression_type`: The compression type for all data generated by\nthe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\nor :data:`None`.\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:`None`.\n- `max_batch_size`: Maximum size of buffered data per partition.\nAfter this amount :meth:`send` coroutine will block until batch is\ndrained.\nDefault: 16384\n- `linger_ms`: The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. 
However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan `linger_ms`, producer will wait ``linger_ms - process_time``.\nDefault: 0 (i.e. no delay).\n- `partitioner`: Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n``partitioner(key_bytes, all_partitions, available_partitions)``.\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:`None`, the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible).\n- `max_request_size`: The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576.\n- `metadata_max_age_ms`: The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000\n- `request_timeout_ms`: Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\ncall), maximum waiting time can be up to ``2 *\nrequest_timeout_ms``.\nDefault: 40000.\n- `retry_backoff_ms`: Milliseconds to backoff when retrying on\nerrors. Default: 100.\n- `api_version`: specify which kafka API version to use.\nIf set to ``auto``, will attempt to infer the broker version by\nprobing various APIs. 
Default: ``auto``\n- `security_protocol`: Protocol used to communicate with brokers.\nValid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,\n``SASL_SSL``. Default: ``PLAINTEXT``.\n- `ssl_context`: pre-configured :class:`~ssl.SSLContext`\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:`~asyncio.loop.create_connection`. For more\ninformation see :ref:`ssl_auth`.\nDefault: :data:`None`\n- `connections_max_idle_ms`: Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:`None` will\ndisable idle checks. Default: 540000 (9 minutes).\n- `enable_idempotence`: When set to :data:`True`, the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:`False`, producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence requires ``acks`` to be set to ``all``. If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:`ValueError` will be thrown.\nNew in version 0.5.0.\n- `sasl_mechanism`: Authentication mechanism when security_protocol\nis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\nare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n``OAUTHBEARER``.\nDefault: ``PLAIN``\n- `sasl_plain_username`: username for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_plain_password`: password for SASL ``PLAIN`` authentication.\nDefault: :data:`None`\n- `sasl_oauth_token_provider (`: class:`~aiokafka.abc.AbstractTokenProvider`):\nOAuthBearer token provider instance. 
(See\n:mod:`kafka.oauth.abstract`).\nDefault: :data:`None`\n\n**Returns**:\n- : A function returning the same function\n\n**Exceptions**:\n- `ValueError`: when needed\n\n### `run_in_background` {#run_in_background}\n\n`def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]`\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n- A decorator function that takes a background task as an input and stores it to be run in the background.\n\n### `set_kafka_broker` {#set_kafka_broker}\n\n`def set_kafka_broker(self, kafka_broker_name: str) -> None`\n\nSets the Kafka broker to start FastKafka with\n\n**Parameters**:\n- `kafka_broker_name`: The name of the Kafka broker to start FastKafka\n\n**Returns**:\n- None\n\n**Exceptions**:\n- `ValueError`: If the provided kafka_broker_name is not found in dictionary of kafka_brokers\n\n### `using_local_kafka` {#using_local_kafka}\n\n`def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester`\n\nStarts local Kafka broker used by the Tester instance\n\n**Parameters**:\n- `data_dir`: Path to the directory where the zookeeper instance will save data\n- `zookeeper_port`: Port for clients (Kafka brokers) to connect\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n\n**Returns**:\n- An instance of tester with Kafka as broker\n\n### `using_local_redpanda` {#using_local_redpanda}\n\n`def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: 
str = 'dev-container', default_log_level: str = 'debug') -> Tester`\n\nStarts local Redpanda broker used by the Tester instance\n\n**Parameters**:\n- `listener_port`: Port on which the clients (producers and consumers) can connect\n- `tag`: Tag of Redpanda image to use to start container\n- `seastar_core`: Core(s) to use by Seastar (the framework Redpanda uses under the hood)\n- `memory`: The amount of memory to make available to Redpanda\n- `mode`: Mode to use to load configuration properties in container\n- `default_log_level`: Log levels to use for Redpanda\n\n**Returns**:\n- An instance of tester with Redpanda as broker\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/cli/fastkafka.md",
    "content": "# `fastkafka`\n\n**Usage**:\n\n```console\n$ fastkafka [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `docs`: Commands for managing fastkafka app...\n* `run`: Runs Fast Kafka API application\n* `testing`: Commands for managing fastkafka testing\n\n## `fastkafka docs`\n\nCommands for managing fastkafka app documentation\n\n**Usage**:\n\n```console\n$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `generate`: Generates documentation for a FastKafka...\n* `install_deps`: Installs dependencies for FastKafka...\n* `serve`: Generates and serves documentation for a...\n\n### `fastkafka docs generate`\n\nGenerates documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs generate [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created; default is current directory\n* `--help`: Show this message and exit.\n\n### `fastkafka docs install_deps`\n\nInstalls dependencies for FastKafka documentation generation\n\n**Usage**:\n\n```console\n$ fastkafka docs install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n### `fastkafka docs serve`\n\nGenerates and serves documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs serve [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  
[required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created; default is current directory\n* `--bind TEXT`: Some info  [default: 127.0.0.1]\n* `--port INTEGER`: Some info  [default: 8000]\n* `--help`: Show this message and exit.\n\n## `fastkafka run`\n\nRuns Fast Kafka API application\n\n**Usage**:\n\n```console\n$ fastkafka run [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--num-workers INTEGER`: Number of FastKafka instances to run, defaults to number of CPU cores.  [default: 64]\n* `--kafka-broker TEXT`: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [default: localhost]\n* `--help`: Show this message and exit.\n\n## `fastkafka testing`\n\nCommands for managing fastkafka testing\n\n**Usage**:\n\n```console\n$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `install_deps`: Installs dependencies for FastKafka app...\n\n### `fastkafka testing install_deps`\n\nInstalls dependencies for FastKafka app testing\n\n**Usage**:\n\n```console\n$ fastkafka testing install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/cli/run_fastkafka_server_process.md",
    "content": "# `run_fastkafka_server_process`\n\n**Usage**:\n\n```console\n$ run_fastkafka_server_process [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--kafka-broker TEXT`: Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [required]\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_00_FastKafka_Demo.md",
    "content": "# FastKafka tutorial\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\n``` python\ntry:\n    import fastkafka\nexcept:\n    ! pip install fastkafka\n```\n\n## Running in Colab\n\nYou can start this interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n</a>\n\n## Writing server code\n\nHere is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic.\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. 
The following call downloads\nthe dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/usage/types/#constrained-types),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. 
It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server.\n\nNext, an object of the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing the Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstances which internally starts Kafka broker and zookeeper.\n\nBefore running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:\n\n``` sh\nfastkafka testing install_deps\n```\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send IrisInputData message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with IrisPrediction in predictions topic\n    await tester.awaited_mocks.on_predictions.assert_awaited_with(\n        IrisPrediction(species=\"setosa\"), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent IrisInputData message to `input_data` topic\n\n4.  
Asserted and checked that the developed iris classification service\n    has reacted to IrisInputData message\n\n## Running the service\n\nThe service can be started using builtin `fastkafka run` CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": 
{\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nTo run the service, you will need a running Kafka broker on localhost as\nspecified in the `kafka_brokers` parameter above. We can start the Kafka\nbroker locally using the\n[`ApacheKafkaBroker`](../api/fastkafka/testing/ApacheKafkaBroker.md/#fastkafka.testing.ApacheKafkaBroker).\nNotice that the same happens automatically in the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\nas shown above.\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: 
Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n    '127.0.0.1:9092'\n\nThen, we start the FastKafka service by running the following command in\nthe folder where the `application.py` file is located:\n\n``` sh\nfastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n```\n\nIn the above command, we use `--num-workers` option to specify how many\nworkers to launch and we use `--kafka-broker` option to specify which\nkafka broker configuration to use from earlier specified `kafka_brokers`\n\n    [1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n    [1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n    [1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n    ^C\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n\nYou need to interupt running of the cell above by selecting\n`Runtime->Interupt execution` on the toolbar above.\n\nFinally, we can stop the local Kafka Broker:\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n  
  [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nWhen running in Colab, we need to update Node.js first:\n\nWe need to install all dependencies for the generator using the\nfollowing command line:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programmatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\nThis will generate the *asyncapi* folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\n    drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    ^C\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation bellow:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    bootstrap_servers=\"localhost:9092\",\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet are for the consumer as specified in\nthe code 
above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippet is for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_01_Intro.md",
    "content": "# Intro\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nThis tutorial will show you how to use <b>FastKafkaAPI</b>, step by\nstep.\n\nThe goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel.\n\nIn this Intro tutorial we’ll go through the basic requirements to run the\ndemos presented in future steps.\n\n## Installing FastKafkaAPI\n\nFirst step is to install FastKafkaAPI\n\n``` shell\n$ pip install fastkafka\n```\n\n## Preparing a Kafka broker\n\nNext step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication.\n\n!!! info \"Hey, your first info!\"\n\n    If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n\nTo go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times).\n\n!!! warning \"Listen! This is important.\"\n\n    To be able to setup this configuration you need to have Docker and docker-compose installed\n\n    See here for more info on <a href = \\\"https://docs.docker.com/\\\" target=\\\"_blank\\\">Docker</a> and <a href = \\\"https://docs.docker.com/compose/install/\\\" target=\\\"_blank\\\">docker compose</a>\n\nTo setup the recommended environment, first, create a new folder where\nyou want to save your demo files (e.g. fastkafka_demo). 
Inside the new\nfolder create a new YAML file named <b>kafka_demo.yml</b> and copy the\nfollowing configuration into it:\n\n``` yaml\nversion: \"3\"\nservices:\n    zookeeper:\n        image: wurstmeister/zookeeper\n        hostname: zookeeper\n        container_name: zookeeper\n        networks:\n          - fastkafka-network\n        ports:\n          - \"2181:2181\"\n          - \"22:22\"\n          - \"2888:2888\"\n          - \"3888:3888\"\n    kafka:\n        image: wurstmeister/kafka\n        container_name: kafka\n        ports:\n          - \"9093:9093\"\n        environment:\n            HOSTNAME_COMMAND: \"docker info | grep ^Name: | cut -d' ' -f 2\"\n            KAFKA_ZOOKEEPER_CONNECT: \"zookeeper:2181\"\n            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n            KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n            KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n            KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n            KAFKA_CREATE_TOPICS: \"hello:1:1\"\n        volumes:\n            - /var/run/docker.sock:/var/run/docker.sock\n        depends_on:\n            - zookeeper\n        healthcheck:\n            test: [ \"CMD\", \"kafka-topics.sh\", \"--list\", \"--zookeeper\", \"zookeeper:2181\" ]\n            interval: 5s\n            timeout: 10s\n            retries: 5\n        networks:\n          - fastkafka-network\nnetworks:\n    fastkafka-network:\n        name: \"fastkafka-network\"\n```\n\nThis configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a ‘hello’ topic (quite enough for a\nstart). To start the configuration, run:\n\n``` shell\n$ docker-compose -f kafka_demo.yml up -d --wait\n```\n\nThis will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
:confetti_ball:\n\n## Running the code\n\nAfter installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the ‘First Steps’ part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem.\n\nYou are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_02_First_Steps.md",
    "content": "# First Steps\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Creating a simple Kafka consumer app\n\nFor our first demo we will create the simplest possible Kafka consumer\nand run it using ‘fastkafka run’ command.\n\nThe consumer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n\n2.  Listen to the hello topic\n\n3.  Write any message received from the hello topic to stdout\n\nTo create the consumer, first, create a file named\n<b>hello_kafka_consumer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n    \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n    print(f\"Got data, msg={msg.msg}\", flush=True)\n```\n\n!!! info \"Kafka configuration\"\n\n    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\n!!! warning \"Remember to flush\"\n\n    Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n\nTo run this consumer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n    [878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n\nNow you can interact with your consumer, by sending the messages to the\nsubscribed ‘hello’ topic, don’t worry, we will cover this in the next\nstep of this guide.\n\n## Sending first message to your consumer\n\nAfter we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly.\n\nIf you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic.\n\nFirst, connect to your running kafka broker by running:\n\n``` shell\ndocker exec -it kafka /bin/bash\n```\n\nThen, when connected to the container, run:\n\n``` shell\nkafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n```\n\nThis will open an interactive connection to the hello topic, now you can\nwrite your messages to the topic and they will be consumed by our\nconsumer.\n\nIn the shell, type:\n\n``` shell\n{\"msg\":\"hello\"}\n```\n\nand press enter. 
This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout.\n\nCheck the output of your consumer (terminal where you ran the ‘fastkafka\nrun’ command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:\n\n``` shell\nGot data, msg=hello\n```\n\n## Creating a hello Kafka producer\n\nConsuming messages is only a part of this Library functionality, the\nother big part is producing the messages. So, let’s create our first\nkafka producer which will send it’s greetings to our consumer\nperiodically.\n\nThe producer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n2.  Connect to the hello topic\n3.  Periodically send a message to the hello world topic\n\nTo create the producer, first, create a file named\n<b>hello_kafka_producer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n    logger.info(f\"Producing: {msg}\")\n    return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello(HelloKafkaMsg(msg=\"hello\"))\n        await asyncio.sleep(1)\n```\n\n!!! 
info \"Kafka configuration\"\n\n    This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\nTo run this producer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n\nNow, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. If your consumer is still running, you\nshould see the messages appear in its log.\n\n## Recap\n\nIn this guide we have:\n\n1.  Created a simple Kafka consumer using FastKafka\n2.  Sent a message to our consumer trough Kafka\n3.  Created a simple Kafka producer using FastKafka\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_03_Authentication.md",
    "content": "# Authentication\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## TLS Authentication\n\nsasl_mechanism (str) – Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN\n\nsasl_plain_username (str) – username for SASL PLAIN authentication.\nDefault: None\n\nsasl_plain_password (str) – password for SASL PLAIN authentication.\nDefault: None\n\nsasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_04_Github_Actions_Workflow.md",
    "content": "# Deploy FastKafka docs to GitHub Pages\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Getting started\n\nAdd your workflow file `.github/workflows/fastkafka_docs_deploy.yml` and\npush it to your remote default branch.\n\nHere is an example workflow:\n\n``` yaml\nname: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n  push:\n    branches: [ \"main\", \"master\" ]\n  workflow_dispatch:\n\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n      - uses: airtai/workflows/fastkafka-ghp@main\n        with:\n          app: \"test_fastkafka.application:kafka_app\"\n```\n\n## Options\n\n### Set app location\n\nInput in the form of `path:app`, where `path` is the path to a Python\nfile and `app` is an object of type\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka):\n\n``` yaml\n- name: Deploy\n  uses: airtai/workflows/fastkafka-ghp@main\n  with:\n    app: \"test_fastkafka.application:kafka_app\"\n```\n\nIn the above example,\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp is named as `kafka_app` and it is available in the `application`\nsubmodule of the `test_fastkafka` module.\n\n## Example Repository\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\nlibrary that uses the above-mentioned workfow actions to publish\nFastKafka docs to `Github Pages` can be found\n[here](https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_05_Lifespan_Handler.md",
    "content": "# Lifespan Events\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nDid you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! :rocket:\n\nLets break it down:\n\nYou can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages.\n\nSimilarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up.\n\nBy executing code before consuming and after producing, you cover the\nentire lifecycle of your application :tada:\n\nThis is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!\n\nSo lets give it a try and see how it can make your Kafka app even more\nawesome! :muscle:\n\n## Lifespan example - Iris prediction model\n\nLet’s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don’t want to load them for every\nmessage.\n\nHere’s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we’ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication.\n\n### Lifespan\n\nYou can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager.\n\nLet’s start with an example and then see it in detail.\n\nWe create an async function lifespan() with yield like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \n```\n\nThe first thing to notice, is that we are defining an async function\nwith `yield`. This is very similar to Dependencies with `yield`.\n\nThe first part of the function, before the `yield`, will be executed\n**before** the application starts. 
And the part after the `yield` will\nbe executed **after** the application has finished.\n\nThis lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown.\n\nThe lifespan will be passed a KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup.\n\nFor demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called.\n\n### Async context manager\n\nContext managers can be used in `with` blocks, our lifespan, for example\ncould be used like this:\n\n``` python\nml_models = {}\nasync with lifespan(None):\n    print(ml_models)\n```\n\nWhen you create a context manager or an async context manager, what it\ndoes is that, before entering the `with` block, it will execute the code\nbefore the `yield`, and after exiting the `with` block, it will execute\nthe code after the `yield`.\n\nIf you want to learn more about context managers and contextlib\ndecorators, please visit [Python official\ndocs](https://docs.python.org/3/library/contextlib.html)\n\n## App demo\n\n### FastKafka app\n\nLet’s now create our application using the created lifespan handler.\n\nNotice how we passed our lifespan handler to the app when constructing\nit through the `lifespan` argument.\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n```\n\n### Data modeling\n\nLet’s model the Iris data for our app:\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, 
description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Consumers and producers\n\nLets create a consumer and producer for our app that will generate\npredictions from input iris data.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Final app\n\nThe final app looks like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal 
width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Running the app\n\nNow we can run the app with your custom lifespan handler. 
Copy the code\nabove in lifespan_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n```\n\nWhen you run the app, you should see a similar output to the one below:\n\n    [262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [262292]: Loading the model!\n    [262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from 
{} to {'input_data': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n    [262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n    [262292]: Exiting, clearing model dict!\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n\n## Recap\n\nIn this guide we have defined a lifespan handler and passed to our\nFastKafka app.\n\nSome important points are:\n\n1.  Lifespan handler is implemented as\n    [AsyncContextManager](https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager)\n2.  Code **before** yield in lifespan will be executed **before**\n    application **startup**\n3.  Code **after** yield in lifespan will be executed **after**\n    application **shutdown**\n4.  You can pass your lifespan handler to FastKafka app on\n    initialisation by passing a `lifespan` argument\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_06_Benchmarking_FastKafka.md",
    "content": "# Benchmarking FastKafka app\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\nTo benchmark a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nproject, you will need the following:\n\n1.  A library built with\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka).\n2.  A running `Kafka` instance to benchmark the FastKafka application\n    against.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        
\"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nhas a decorator for benchmarking which is appropriately called as\n`benchmark`. 
Let’s edit our `application.py` file and add the\n`benchmark` decorator to the consumes method.\n\n``` python\n# content of the \"application.py\" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", 
auto_offset_reset=\"latest\")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nHere we are conducting a benchmark of a function that consumes data from\nthe `input_data` topic with an interval of 1 second and a sliding window\nsize of 5.\n\nThis `benchmark` method uses the `interval` parameter to calculate the\nresults over a specific time period, and the `sliding_window_size`\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation.\n\nThis benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement.\n\n### Starting Kafka\n\nIf you already have a `Kafka` running somewhere, then you can skip this\nstep.\n\nPlease keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself.\n\n#### Installing Java and Kafka\n\nWe need a working `Kafka`instance to benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp, and to run `Kafka` we need `Java`. 
Thankfully,\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\ncomes with a CLI to install both `Java` and `Kafka` on our machine.\n\nSo, let’s install `Java` and `Kafka` by executing the following command.\n\n``` cmd\nfastkafka testing install_deps\n```\n\nThe above command will extract `Kafka` scripts at the location\n“\\$HOME/.local/kafka_2.13-3.3.2\" on your machine.\n\n#### Creating configuration for Zookeeper and Kafka\n\nNow we need to start `Zookeeper` and `Kafka` separately, and to start\nthem we need `zookeeper.properties` and `kafka.properties` files.\n\nLet’s create a folder inside the folder where `Kafka` scripts were\nextracted and change directory into it.\n\n``` cmd\nmkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n```\n\nLet’s create a file called `zookeeper.properties` and write the\nfollowing content to the file:\n\n``` txt\ndataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n```\n\nSimilarly, let’s create a file called `kafka.properties` and write the\nfollowing content to the file:\n\n``` txt\nbroker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n```\n\n#### Starting Zookeeper and Kafka\n\nWe need two different terminals to run `Zookeeper` in one and `Kafka` in\nanother. 
Let’s open a new terminal and run the following commands to\nstart `Zookeeper`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n```\n\nOnce `Zookeeper` is up and running, open a new terminal and execute the\nfollowing commands to start `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n```\n\nNow we have both `Zookeeper` and `Kafka` up and running.\n\n#### Creating topics in Kafka\n\nIn a new terminal, please execute the following command to create\nnecessary topics in `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n```\n\n#### Populating topics with dummy data\n\nTo benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp, we need some data in `Kafka` topics.\n\nIn the same terminal, let’s create some dummy data:\n\n``` cmd\nyes '{\"sepal_length\": 0.7739560486, \"sepal_width\": 0.8636615789, \"petal_length\": 0.6122663046, \"petal_width\": 0.1338914722}' | head -n 1000000 > /tmp/test_data\n```\n\nThis command will create a file called `test_data` in the `tmp` folder\nwith one million rows of text. This will act as dummy data to populate\nthe `input_data` topic.\n\nLet’s populate the created topic `input_data` with the dummy data which\nwe created above:\n\n``` cmd\n./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n```\n\nNow our topic `input_data` has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again.\n\n### Benchmarking FastKafka\n\nOnce `Zookeeper` and `Kafka` are ready, benchmarking\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp is as simple as running the `fastkafka run` command:\n\n``` cmd\nfastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n```\n\nThis command will start the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp and begin consuming messages from `Kafka`, which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the `interval` and `sliding_window_size` values.\n\nThe output for the `fastkafka run` command is:\n\n``` txt\n[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 
23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n```\n\nBased on the output, when using 1 worker, our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp achieved a `throughput` of 93k messages per second and an\n`average throughput` of 93k messages per second.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",
    "content": "# Encoding and Decoding Kafka Messages with FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\n1.  A basic knowledge of\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    is needed to proceed with this guide. If you are not familiar with\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka),\n    please go through the [tutorial](/docs#tutorial) first.\n2.  [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    with its dependencies installed is needed. Please install\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    using the command - `pip install fastkafka`\n\n## Ways to Encode and Decode Messages with FastKafka\n\nIn Python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. Similarly, while consuming messages, we consume them as bytes and\nthen convert them to strings.\n\nIn FastKafka, we specify message schema using Pydantic models as\nmentioned in [tutorial](/docs#messages):\n\n``` python\n# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nThen, we send and receive messages as instances of Pydantic models which\nwe defined. 
So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics.\n\nThe `@consumes` and `@produces` methods of FastKafka accept a parameter\ncalled `decoder`/`encoder` to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:\n\n1.  json - This is the default encoder/decoder option in FastKafka.\n    While producing, this option converts our instance of Pydantic model\n    messages to a JSON string and then converts it to bytes before\n    sending it to the topic. While consuming, it converts bytes to a\n    JSON string and then constructs an instance of Pydantic model from\n    the JSON string.\n2.  avro - This option uses Avro encoding/decoding to convert instances\n    of Pydantic model messages to bytes while producing, and while\n    consuming, it constructs an instance of Pydantic model from bytes.\n3.  custom encoder/decoder - If you are not happy with the json or avro\n    encoder/decoder options, you can write your own encoder/decoder\n    functions and use them to encode/decode Pydantic messages.\n\n## 1. Json encoder and decoder\n\nThe default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string.\n\nWe can use the application from [tutorial](/docs#running-the-service) as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder\nparameter:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"json\")\nasync def 
on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"json\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above code, the `@kafka_app.consumes` decorator sets up a\nconsumer for the “input_data\" topic, using the ‘json’ decoder to convert\nthe message payload to an instance of `IrisInputData`. The\n`@kafka_app.produces` decorator sets up a producer for the “predictions\"\ntopic, using the ‘json’ encoder to convert the instance of\n`IrisPrediction` to message payload.\n\n## 2. Avro encoder and decoder\n\n### What is Avro?\n\nAvro is a row-oriented remote procedure call and data serialization\nframework developed within Apache’s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n[docs](https://avro.apache.org/docs/).\n\n### Installing FastKafka with Avro dependencies\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nwith Avro support using the command - `pip install fastkafka[avro]`\n\n### Defining Avro Schema Using Pydantic Models\n\nBy default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models.\n\nSo, similar to the [tutorial](/docs#tutorial), the message schema will\nremain as it is.\n\n``` python\n# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nNo need to change anything to support avro. 
You can use existing\nPydantic models as is.\n\n### Reusing existing avro schema\n\nIf you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined.\n\n#### Building pydantic models from avro schema dictionary\n\nLet’s modify the above example and let’s assume we have schemas already\nfor `IrisInputData` and `IrisPrediction` which will look like below:\n\n``` python\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": \"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n```\n\nWe can easily construct pydantic models from avro schema using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction which is included as part of\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nitself.\n\n``` python\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\nThe above code will convert avro schema to pydantic models and will\nprint pydantic models’ fields. 
The output of the above is:\n\n``` txt\n{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n```\n\nThis is exactly same as manually defining the pydantic models ourselves.\nYou don’t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction as demonstrated above.\n\n#### Building pydantic models from `.avsc` file\n\nNot all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary `.avsc` files in\nfilesystem. Let’s see how to convert those `.avsc` files to pydantic\nmodels.\n\nLet’s assume our avro files are stored in files called\n`iris_input_data_schema.avsc` and `iris_prediction_schema.avsc`. In that\ncase, following code converts the schema to pydantic models:\n\n``` python\nimport json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open(\"iris_input_data_schema.avsc\", \"rb\") as f:\n    iris_input_data_schema = json.load(f)\n    \nwith open(\"iris_prediction_schema.avsc\", \"rb\") as f:\n    iris_prediction_schema = json.load(f)\n    \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n```\n\n### Consume/Produce avro messages with FastKafka\n\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nprovides `@consumes` and `@produces` methods to consume/produces\nmessages to/from a `Kafka` topic. 
This is explained in\n[tutorial](/docs#function-decorators).\n\nThe `@consumes` and `@produces` methods accepts a parameter called\n`decoder`/`encoder` to decode/encode avro messages.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", encoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", decoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above example, in `@consumes` and `@produces` methods, we\nexplicitly instruct FastKafka to `decode` and `encode` messages using\nthe `avro` `decoder`/`encoder` instead of the default `json`\n`decoder`/`encoder`.\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use `avro` to `decode` and\n`encode` messages:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": 
\"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThe above code is a sample implementation of using FastKafka to consume\nand produce 
Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md/#fastkafka.encoder.avsc_to_pydantic)\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages.\n\nThe\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is then instantiated with the broker details, and two functions\ndecorated with `@kafka_app.consumes` and `@kafka_app.produces` are\ndefined to consume messages from the “input_data\" topic and produce\nmessages to the “predictions\" topic, respectively. The functions uses\nthe decoder=“avro\" and encoder=“avro\" parameters to decode and encode\nthe Avro messages.\n\nIn summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline.\n\n## 3. Custom encoder and decoder\n\nIf you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages.\n\n### Writing a custom encoder and decoder\n\nIn this section, let’s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n[ROT13](https://en.wikipedia.org/wiki/ROT13) cipher.\n\n``` python\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n```\n\nThe above code defines two custom functions for 
encoding and decoding\nmessages in a Kafka application using the FastKafka library.\n\nThe encoding function, `custom_encoder()`, takes a message `msg` which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe `json()` method, obfuscates the resulting string using the ROT13\nalgorithm from the `codecs` module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding.\n\nThe decoding function, `custom_decoder()`, takes a raw message `raw_msg`\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the `json.loads()` method and returns a\nnew instance of the specified `cls` class initialized with the decoded\ndictionary.\n\nThese functions can be used with FastKafka’s `encoder` and `decoder`\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics.\n\nLet’s test the above code\n\n``` python\ni = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n```\n\nThis will result in following output\n\n``` txt\nb'{\"frcny_yratgu\": 0.5, \"frcny_jvqgu\": 0.5, \"crgny_yratgu\": 0.5, \"crgny_jvqgu\": 0.5}'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n```\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use our custom decoder and\nencoder functions:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: 
FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\n\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    
kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThis code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system.\n\nThe custom `encoder` function takes a message represented as a\n`BaseModel` and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned.\n\nThe custom `decoder` function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the `json`\nmodule. This dictionary is then converted to a `BaseModel` instance\nusing the cls parameter.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_11_Consumes_Basics.md",
    "content": "# @consumes basics\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@consumes` decorator to consume messages from Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will consume\n`HelloWorld` messages from hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n\nTo use the `@consumes` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\nIn this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger.\n\n``` python\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to\nconsume from the topic using [pydantic](https://docs.pydantic.dev/). For\nthe guide we’ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": 
\"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a consumer function and decorate it with `@consumes`\n\nLet’s create a consumer function that will consume `HelloWorld` messages\nfrom *hello_world* topic and log them.\n\n``` python\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nThe function decorated with the `@consumes` decorator will be called\nwhen a message is produced to Kafka.\n\nThe message will then be injected into the typed *msg* argument of the\nfunction and its type will be used to parse the message.\n\nIn this example case, when the message is sent into a *hello_world*\ntopic, it will be parsed into a HelloWorld class and `on_hello_world`\nfunction will be called with the parsed class as *msg* argument value.\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.\n\n## Send the message to kafka topic\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. 
In your\nterminal, run:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\n``` python\nprint(consumer_task.value[1].decode(\"UTF-8\"))\n```\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are\nreceiving the message from, this is because the `@consumes` decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default “on\\_\" prefix from it\nand use the rest as the topic name. In this example case, the topic is\n*hello_world*.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nconsumes decorator, like this:\n\nAlso, you can define the topic name completely by defining the `topic`\nin parameter in consumes decorator, like this:\n\n## Message data\n\nThe message received from kafka is translated from binary JSON\nrepresentation int the class defined by typing of *msg* parameter in the\nfunction decorated by the `@consumes` decorator.\n\nIn this example case, the message will be parsed into a `HelloWorld`\nclass.\n\n## Message metadata\n\nIf you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding a EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction.\n\nLet’s demonstrate that.\n\n### Create a consumer function with metadata\n\nThe only difference from the original basic consume function is that we\nare now passing the `meta: EventMetadata` argument to the function. The\n`@consumes` decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. 
Lets log it to see what it\ncontains.\n\nFirst, we need to import the EventMetadata\n\nNow we can add the `meta` argument to our consuming function.\n\nYour final app should look like this:\n\nNow lets run the app and send a message to the broker to see the logged\nmessage metadata.\n\nYou should see a similar log as the one below and the metadata being\nlogged in your app.\n\nAs you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n:tada:\n\n## Dealing with high latency consuming functions\n\nIf your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consumeing events. By default, the consume function will\nrun the consuming funtions for one topic sequentially, this is the most\nstraightforward approach and results with the least amount of overhead.\n\nBut, to handle those high latency tasks and run them in parallel,\nFastKafka has a\n[`DynamicTaskExecutor`](../api/fastkafka/executors/DynamicTaskExecutor.md/#fastkafka.executors.DynamicTaskExecutor)\nprepared for your consumers. This executor comes with additional\noverhead, so use it only when you need to handle high latency functions.\n\nLets demonstrate how to use it.\n\n``` python\ndecorate_consumes_executor = \"\"\"@app.consumes(executor=\"DynamicTaskExecutor\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n\"\"\"\nmd(f\"```python\\n{decorate_consumes}\\n```\")\n```\n\n``` python\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. 
In your\nterminal, run:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_12_Batch_Consuming.md",
    "content": "# Batch consuming\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nIf you want to consume data in batches `@consumes` decorator makes that\npossible for you. By typing a consumed msg object as a `list` of\nmessages the consumer will call your consuming function with a batch of\nmessages consumed from a single partition. Let’s demonstrate that now.\n\n## Consume function with batching\n\nTo consume messages in batches, you need to wrap you message type into a\nlist and the `@consumes` decorator will take care of the rest for you.\nYour consumes function will be called with batches grouped by partition\nnow.\n\n``` python\n@app.consumes(auto_offset_reset=\"earliest\")\nasync def on_hello_world(msg: List[HelloWorld]):\n    logger.info(f\"Got msg batch: {msg}\")\n```\n\n## App example\n\nWe will modify the app example from [@consumes\nbasics](/docs/guides/Guide_11_Consumes_Basics.md) guide to consume\n`HelloWorld` messages batch. The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nimport asyncio\nfrom typing import List\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(auto_offset_reset=\"earliest\")\nasync def on_hello_world(msg: List[HelloWorld]):\n    logger.info(f\"Got msg batch: {msg}\")\n```\n\n## Send the messages to kafka topic\n\nLets send a 
couple of `HelloWorld` messages to the *hello_world* topic\nand check if our consumer kafka application has logged the received\nmessages batch. In your terminal, run the following command at least two\ntimes to create multiple messages in your kafka queue:\n\n``` shell\necho {\\\"msg\\\": \\\"Hello world\\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nNow we can run the app. Copy the code of the example app in\nconsumer_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nYou should see the your Kafka messages being logged in batches by your\nconsumer.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_21_Produces_Basics.md",
    "content": "# @produces basics\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@produces` decorator to produce messages to Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n\nTo use the `@produces` decorator, frist we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to send\nto the topic using [pydantic](https://docs.pydantic.dev/). For the guide\nwe’ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a producer function and decorate it with `@produces`\n\nLet’s create a producer function that will produce `HelloWorld` messages\nto *hello_world* topic:\n\n``` python\n\n@app.produces()\nasync def 
to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nNow you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic.\n\nBy default, the topic is determined from your function name, the “to\\_\"\nprefix is stripped and what is left over is used as a topic name. I this\ncase, that is *hello_world*.\n\n## Instruct the app to start sending HelloWorld messages\n\nLet’s use `@run_in_background` decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second.\n\n``` python\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\n``` python\nscript_file = \"producer_example.py\"\ncmd = \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\"\nmd(\n    f\"Now we can run the app. 
Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```\"\n)\n```\n\nNow we can run the app. Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n\n## Check if the message was sent to the Kafka topic\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the {“msg\": “Hello world!\"} messages in your topic.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are sending\nthe message to, this is because the `@produces` decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default “to\\_\" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n*hello_world*.\n\n!!! warn \"New topics\"\n\n    Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nproduces decorator, like this:\n\n``` python\n\n@app.produces(prefix=\"send_to_\")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nAlso, you can define the topic name completely by defining the `topic`\nin parameter in produces decorator, like this:\n\n``` python\n\n@app.produces(topic=\"my_special_topic\")\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\n## Message data\n\nThe return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app.\n\nIn this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. In the from of:\n`b'{\"msg\": \"Hello world!\"}'`\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_22_Partition_Keys.md",
    "content": "# Defining a partition key\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nPartition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance.\n\nYou can define your partition keys when using the `@produces` decorator,\nthis guide will demonstrate to you this feature.\n\n## Return a key from the producing function\n\nTo define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass and set the key value. Check the example below:\n\n``` python\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n```\n\nIn the example, we want to return the `HelloWorld` message class with\nthe key defined as *my_key*. So, we wrap the message and key into a\nKafkaEvent class and return it as such.\n\nWhile generating the documentation, the\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass will be unwrapped and the `HelloWorld` class will be documented in\nthe definition of message type, same way if you didn’t use the key.\n\n!!! info \"Which key to choose?\"\n\n    Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. 
Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n\n## App example\n\nWe will modify the app example from **@producer basics** guide to return\nthe `HelloWorld` with our key. The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_with_key_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n\n## Check if the message was sent to the Kafka topic with the desired key\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic with the defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the *my_key {“msg\": “Hello world!\"}* messages in your\ntopic appearing, the *my_key* part of the message is the key that we\ndefined in our producing function.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_23_Batch_Producing.md",
    "content": "# Batch producing\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nIf you want to send your data in batches `@produces` decorator makes\nthat possible for you. By returning a `list` of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker.\n\nThis guide will demonstrate how to use this feature.\n\n## Return a batch from the producing function\n\nTo define a batch that you want to produce to Kafka topic, you need to\nreturn the `List` of the messages that you want to be batched from your\nproducing function.\n\n``` python\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\nIn the example, we want to return the `HelloWorld` message class batch\nthat is created from a list of msgs we passed into our producing\nfunction.\n\nLets also prepare a backgound task that will send a batch of “hello\nworld\" messages when the app starts.\n\n``` python\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n```\n\n## App example\n\nWe will modify the app example from [@producer\nbasics](/docs/guides/Guide_21_Produces_Basics.md) guide to return the\n`HelloWorld` batch. 
The final app will look like this (make sure you\nreplace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n    [46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n\n## Check if the batch was sent to the Kafka topic with the defined key\n\nLets check the topic and see if there are “Hello world\" messages in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages in your topic.\n\n## Batch key\n\nTo define a key for your batch like in [Defining a partition\nkey](/docs/guides/Guide_22_Partition_Keys.md) guide you can wrap the\nreturning value in a\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass. To learn more about defining a partition ke and\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md/#fastkafka.KafkaEvent)\nclass, please, have a look at [Defining a partition\nkey](/docs/guides/Guide_22_Partition_Keys.md) guide.\n\nLet’s demonstrate that.\n\nTo define a key, we just need to modify our producing function, like\nthis:\n\n``` python\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\"my_key\")\n```\n\nNow our app looks like this:\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], 
key=b\"my_key\")\n```\n\n## Check if the batch was sent to the Kafka topic\n\nLets check the topic and see if there are “Hello world\" messages in the\nhello_world topic, containing a defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages with the defined key in your topic.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",
    "content": "# Using multiple Kafka clusters\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nReady to take your FastKafka app to the next level? This guide shows you\nhow to connect to multiple Kafka clusters effortlessly. Consolidate\ntopics and produce messages across clusters like a pro. Unleash the full\npotential of your Kafka-powered app with FastKafka. Let’s dive in and\nelevate your application’s capabilities!\n\n### Test message\n\nTo showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called `TestMsg`. Here’s the\ndefinition of the `TestMsg` class:\n\n``` python\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n```\n\n## Defining multiple broker configurations\n\nWhen building a FastKafka application, you may need to consume messages\nfrom multiple Kafka clusters, each with its own set of broker\nconfigurations. FastKafka provides the flexibility to define different\nbroker clusters using the brokers argument in the consumes decorator.\nLet’s explore an example code snippet\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(\n    development=dict(url=\"dev.server_1\", port=9092),\n    production=dict(url=\"prod.server_1\", port=9092),\n)\nkafka_brokers_2 = dict(\n    development=dict(url=\"dev.server_2\", port=9092),\n    production=dict(url=\"prod.server_1\", port=9092),\n)\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n    print(f\"Received on s1: {msg=}\")\n    await to_predictions_1(msg)\n\n\n@app.consumes(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n    print(f\"Received on s2: {msg=}\")\n    await to_predictions_2(msg)\n    \n@app.produces(topic=\"predictions\")\nasync def 
to_predictions_1(msg: TestMsg) -> TestMsg:\n    return msg\n    \n@app.produces(topic=\"predictions\", brokers=kafka_brokers_2)\nasync def to_predictions_2(msg: TestMsg) -> TestMsg:\n    return msg\n```\n\nIn this example, the application has two consumes endpoints, both of\nwhich will consume events from `preprocessed_signals` topic.\n`on_preprocessed_signals_1` will consume events from `kafka_brokers_1`\nconfiguration and `on_preprocessed_signals_2` will consume events from\n`kafka_brokers_2` configuration. When producing, `to_predictions_1` will\nproduce to `predictions` topic on `kafka_brokers_1` cluster and\n`to_predictions_2` will produce to `predictions` topic on\n`kafka_brokers_2` cluster.\n\n#### How it works\n\nThe `kafka_brokers_1` configuration represents the primary cluster,\nwhile `kafka_brokers_2` serves as an alternative cluster specified in\nthe decorator.\n\nUsing the FastKafka class, the app object is initialized with the\nprimary broker configuration (`kafka_brokers_1`). By default, the\n`@app.consumes` decorator without the brokers argument consumes messages\nfrom the `preprocessed_signals` topic on `kafka_brokers_1`.\n\nTo consume messages from a different cluster, the `@app.consumes`\ndecorator includes the `brokers` argument. This allows explicit\nspecification of the broker cluster in the `on_preprocessed_signals_2`\nfunction, enabling consumption from the same topic but using the\n`kafka_brokers_2` configuration.\n\nThe brokers argument can also be used in the @app.produces decorator to\ndefine multiple broker clusters for message production.\n\nIt’s important to ensure that all broker configurations have the same\nrequired settings as the primary cluster to ensure consistent behavior.\n\n## Testing the application\n\nTo test our FastKafka ‘mirroring’ application, we can use our testing\nframework. 
Lets take a look how it’s done:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\n    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\"signal_s1\"))\n    # Assert on_preprocessed_signals_1 consumed sent message\n    await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\n        TestMsg(msg=\"signal_s1\"), timeout=5\n    )\n    # Assert app has produced a prediction\n    await tester.mirrors[app.to_predictions_1].assert_called_with(\n        TestMsg(msg=\"signal_s1\"), timeout=5\n    )\n\n    # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\n    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\"signal_s2\"))\n    # Assert on_preprocessed_signals_2 consumed sent message\n    await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\n        TestMsg(msg=\"signal_s2\"), timeout=5\n    )\n    # Assert app has produced a prediction\n    await tester.mirrors[app.to_predictions_2].assert_called_with(\n        TestMsg(msg=\"signal_s2\"), timeout=5\n    )\n```\n\n    23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:33:08.721 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:33:08.721 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n    23-05-30 10:33:08.722 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.722 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n    23-05-30 
10:33:08.723 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.741 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n    23-05-30 10:33:08.741 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.742 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n    23-05-30 10:33:08.743 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:08.744 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n    23-05-30 10:33:08.746 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:08.749 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'dev.server_2:9092'}\n    23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.755 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.756 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:08.756 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n    23-05-30 10:33:08.758 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.758 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:33:08.759 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:08.760 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:08.761 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'dev.server_2:9092'}\n    23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:08.762 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:08.763 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:33:08.763 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    Received on s1: msg=TestMsg(msg='signal_s1')\n    Received on s2: msg=TestMsg(msg='signal_s2')\n    23-05-30 10:33:13.745 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:13.747 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.748 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:13.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() 
finished.\n    23-05-30 10:33:13.751 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:33:13.754 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nThe usage of the `tester.mirrors` dictionary allows specifying the\ndesired topic/broker combination for sending the test messages,\nespecially when working with multiple Kafka clusters. This ensures that\nthe data is sent to the appropriate topic/broker based on the consuming\nfunction, and consumed from appropriate topic/broker based on the\nproducing function.\n\n## Running the application\n\nYou can run your application using `fastkafka run` CLI command in the\nsame way that you would run a single cluster app.\n\nTo start your app, copy the code above in multi_cluster_example.py and\nrun it by running:\n\nNow we can run the app. 
Copy the code above in multi_cluster_example.py,\nadjust your server configurations, and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\n```\n\nIn your app logs, you should see your app starting up and your two\nconsumer functions connecting to different kafka clusters.\n\n    [90735]: 23-05-30 10:33:29.699 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n    [90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:57647'}\n    [90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n    [90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n    [90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n    [90735]: 23-05-30 10:33:29.718 [INFO] 
aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n    [90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [90735]: 23-05-30 10:33:29.722 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n    [90735]: 23-05-30 10:33:29.723 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n    Starting process cleanup, this may take a few seconds...\n    23-05-30 10:33:33.548 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 90735...\n    [90735]: 23-05-30 10:33:34.666 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:34.777 [INFO] fastkafka._server: terminate_asyncio_process(): Process 90735 terminated.\n\n## Application documentation\n\nAt the moment the documentation for multicluster app is not yet\nimplemented, but it is under development and you can expect it soon!\n\n## Examples on how to use multiple broker configurations\n\n### Example \#1\n\nIn this section, we’ll explore how you can effectively forward topics\nbetween different Kafka clusters, enabling seamless data synchronization\nfor your applications.\n\nImagine having two Kafka clusters, namely `kafka_brokers_1` and\n`kafka_brokers_2`, each hosting its own set of topics and messages. 
Now,\nif you want to forward a specific topic (in this case:\n`preprocessed_signals`) from kafka_brokers_1 to kafka_brokers_2,\nFastKafka provides an elegant solution.\n\nLet’s examine the code snippet that configures our application for topic\nforwarding:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_original(msg: TestMsg):\n    await to_preprocessed_signals_forward(msg)\n\n\n@app.produces(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\n    return data\n```\n\nHere’s how it works: our FastKafka application is configured to consume\nmessages from `kafka_brokers_1` and process them in the\n`on_preprocessed_signals_original` function. We want to forward these\nmessages to `kafka_brokers_2`. To achieve this, we define the\n`to_preprocessed_signals_forward` function as a producer, seamlessly\nproducing the processed messages to the preprocessed_signals topic\nwithin the `kafka_brokers_2` cluster.\n\n#### Testing\n\nTo test our FastKafka forwarding application, we can use our testing\nframework. 
Let’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg=\"signal\"))\n    await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\n```\n\n    23-05-30 10:33:40.969 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:33:40.970 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:33:40.971 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:33:40.972 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-05-30 10:33:40.972 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:40.982 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:33:40.982 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:40.983 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:40.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:33:40.984 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:40.985 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:40.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:40.986 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:40.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-05-30 10:33:40.989 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:40.989 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:40.991 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:44.983 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:44.986 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:44.987 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() finished.\n    23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:33:44.988 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nWith the help of the **Tester** object, we can simulate and verify the\nbehavior of our FastKafka application. Here’s how it works:\n\n1.  We create an instance of the **Tester** by passing in our *app*\n    object, which represents our FastKafka application.\n\n2.  Using the **tester.mirrors** dictionary, we can send a message to a\n    specific Kafka broker and topic combination. In this case, we use\n    `tester.mirrors[app.on_preprocessed_signals_original]` to send a\n    TestMsg message with the content “signal\" to the appropriate Kafka\n    broker and topic.\n\n3.  After sending the message, we can perform assertions on the mirrored\n    function using\n    `tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)`.\n    This assertion ensures that the mirrored function has been called\n    within a specified timeout period (in this case, 5 seconds).\n\n### Example \\#2\n\nIn this section, we’ll explore how you can effortlessly consume data\nfrom multiple sources, process it, and aggregate the results into a\nsingle topic on a specific cluster.\n\nImagine you have two Kafka clusters: **kafka_brokers_1** and\n**kafka_brokers_2**, each hosting its own set of topics and messages.\nNow, what if you want to consume data from both clusters, perform some\nprocessing, and produce the results to a single topic on\n**kafka_brokers_1**? 
FastKafka has got you covered!\n\nLet’s take a look at the code snippet that configures our application\nfor aggregating multiple clusters:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n    print(f\"Default: {msg=}\")\n    await to_predictions(msg)\n\n\n@app.consumes(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n    print(f\"Specified: {msg=}\")\n    await to_predictions(msg)\n\n\n@app.produces(topic=\"predictions\")\nasync def to_predictions(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction: {prediction}\")\n    return [prediction]\n```\n\nHere’s the idea: our FastKafka application is set to consume messages\nfrom the topic “preprocessed_signals\" on **kafka_brokers_1** cluster, as\nwell as from the same topic on **kafka_brokers_2** cluster. We have two\nconsuming functions, `on_preprocessed_signals_1` and\n`on_preprocessed_signals_2`, that handle the messages from their\nrespective clusters. These functions perform any required processing, in\nthis case, just calling the to_predictions function.\n\nThe exciting part is that the to_predictions function acts as a\nproducer, sending the processed results to the “predictions\" topic on\n**kafka_brokers_1 cluster**. 
By doing so, we effectively aggregate the\ndata from multiple sources into a single topic on a specific cluster.\n\nThis approach enables you to consume data from multiple Kafka clusters,\nprocess it, and produce the aggregated results to a designated topic.\nWhether you’re generating predictions, performing aggregations, or any\nother form of data processing, FastKafka empowers you to harness the\nfull potential of multiple clusters.\n\n#### Testing\n\nLet’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\"signal\"))\n    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\"signal\"))\n    await tester.on_predictions.assert_called(timeout=5)\n```\n\n    23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:33:50.828 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:33:50.829 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:33:50.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:50.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:33:50.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:33:50.876 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-05-30 10:33:50.876 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    
23-05-30 10:33:50.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:50.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:50.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:50.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:50.880 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-05-30 10:33:50.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:50.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:50.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:50.883 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 
10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:33:50.884 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:33:50.885 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:33:50.885 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:33:50.886 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    Default: msg=TestMsg(msg='signal')\n    Sending prediction: msg='signal'\n    Specified: msg=TestMsg(msg='signal')\n    Sending prediction: msg='signal'\n    23-05-30 10:33:54.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:54.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:54.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:54.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:54.881 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:54.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:33:54.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:33:54.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nHere’s how the code above works:\n\n1.  Within an `async with` block, create an instance of the Tester by\n    passing in your app object, representing your FastKafka application.\n\n2.  Using the tester.mirrors dictionary, you can send messages to\n    specific Kafka broker and topic combinations. In this case, we use\n    `tester.mirrors[app.on_preprocessed_signals_1]` and\n    `tester.mirrors[app.on_preprocessed_signals_2]` to send TestMsg\n    messages with the content “signal\" to the corresponding Kafka broker\n    and topic combinations.\n\n3.  After sending the messages, you can perform assertions on the\n    **on_predictions** function using\n    `tester.on_predictions.assert_called(timeout=5)`. This assertion\n    ensures that the on_predictions function has been called within a\n    specified timeout period (in this case, 5 seconds).\n\n### Example \\#3\n\nIn some scenarios, you may need to produce messages to multiple Kafka\nclusters simultaneously. FastKafka simplifies this process by allowing\nyou to configure your application to produce messages to multiple\nclusters effortlessly. 
Let’s explore how you can achieve this:\n\nConsider the following code snippet that demonstrates producing messages\nto multiple clusters:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals(msg: TestMsg):\n    print(f\"{msg=}\")\n    await to_predictions_1(TestMsg(msg=\"prediction\"))\n    await to_predictions_2(TestMsg(msg=\"prediction\"))\n\n\n@app.produces(topic=\"predictions\")\nasync def to_predictions_1(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction to s1: {prediction}\")\n    return [prediction]\n\n\n@app.produces(topic=\"predictions\", brokers=kafka_brokers_2)\nasync def to_predictions_2(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction to s2: {prediction}\")\n    return [prediction]\n```\n\nHere’s what you need to know about producing to multiple clusters:\n\n1.  We define two Kafka broker configurations: **kafka_brokers_1** and\n    **kafka_brokers_2**, representing different clusters with their\n    respective connection details.\n\n2.  We create an instance of the FastKafka application, specifying\n    **kafka_brokers_1** as the primary cluster for producing messages.\n\n3.  The `on_preprocessed_signals` function serves as a consumer,\n    handling incoming messages from the “preprocessed_signals\" topic.\n    Within this function, we invoke two producer functions:\n    `to_predictions_1` and `to_predictions_2`.\n\n4.  The `to_predictions_1` function sends predictions to the\n    “predictions\" topic on *kafka_brokers_1* cluster.\n\n5.  
Additionally, the `to_predictions_2` function sends the same\n    predictions to the “predictions\" topic on *kafka_brokers_2* cluster.\n    This allows for producing the same data to multiple clusters\n    simultaneously.\n\nBy utilizing this approach, you can seamlessly produce messages to\nmultiple Kafka clusters, enabling you to distribute data across\ndifferent environments or leverage the strengths of various clusters.\n\nFeel free to customize the producer functions as per your requirements,\nperforming any necessary data transformations or enrichment before\nsending the predictions.\n\nWith FastKafka, producing to multiple clusters becomes a breeze,\nempowering you to harness the capabilities of multiple environments\neffortlessly.\n\n#### Testing\n\nLet’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.to_preprocessed_signals(TestMsg(msg=\"signal\"))\n    await tester.mirrors[to_predictions_1].assert_called(timeout=5)\n    await tester.mirrors[to_predictions_2].assert_called(timeout=5)\n```\n\n    23-05-30 10:34:00.033 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    23-05-30 10:34:00.034 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-05-30 10:34:00.035 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-05-30 10:34:00.036 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:34:00.037 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:34:00.038 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-05-30 10:34:00.038 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched 
start() called()\n    23-05-30 10:34:00.052 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-05-30 10:34:00.053 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-05-30 10:34:00.054 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:34:00.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:34:00.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:34:00.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:34:00.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:34:00.057 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:34:00.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-05-30 10:34:00.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:34:00.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:34:00.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() 
called\n    23-05-30 10:34:00.062 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:34:00.062 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-05-30 10:34:00.064 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-05-30 10:34:00.064 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-05-30 10:34:00.065 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    msg=TestMsg(msg='signal')\n    Sending prediction to s1: msg='prediction'\n    Sending prediction to s2: msg='prediction'\n    23-05-30 10:34:04.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:34:04.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:34:04.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:34:04.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:34:04.057 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-05-30 10:34:04.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-05-30 10:34:04.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-30 10:34:04.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nHere’s how you can perform the necessary tests:\n\n1.  Within an async with block, create an instance of the **Tester** by\n    passing in your app object, representing your FastKafka application.\n\n2.  Using the `tester.to_preprocessed_signals` method, you can send a\n    TestMsg message with the content “signal\".\n\n3.  After sending the message, you can perform assertions on the\n    to_predictions_1 and to_predictions_2 functions using\n    `tester.mirrors[to_predictions_1].assert_called(timeout=5)` and\n    `tester.mirrors[to_predictions_2].assert_called(timeout=5)`. These\n    assertions ensure that the respective producer functions have\n    produced data to their respective topic/broker combinations.\n\nBy employing this testing approach, you can verify that the producing\nfunctions correctly send messages to their respective clusters. The\ntesting framework provided by FastKafka enables you to ensure the\naccuracy and reliability of your application’s producing logic.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",
    "content": "# Deploying FastKafka using Docker\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Building a Docker Image\n\nTo build a Docker image for a FastKafka project, we need the following\nitems:\n\n1.  A library that is built using FastKafka.\n2.  A file in which the requirements are specified. This could be a\n    requirements.txt file, a setup.py file, or even a wheel file.\n3.  A Dockerfile to build an image that will include the two files\n    mentioned above.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import 
FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Creating requirements.txt file\n\nThe above code only requires `fastkafka`. So, we will add only\n`fastkafka` to the `requirements.txt` file, but you can add additional\nrequirements to it as well.\n\n``` txt\nfastkafka>=0.3.0\n```\n\nHere we are using `requirements.txt` to store the project’s\ndependencies. However, other methods like `setup.py`, `pipenv`, and\n`wheel` files can also be used. `setup.py` is commonly used for\npackaging and distributing Python modules, while `pipenv` is a tool used\nfor managing virtual environments and package dependencies. 
`wheel`\nfiles are built distributions of Python packages that can be installed\nwith pip.\n\n### Creating Dockerfile\n\n``` dockerfile\n# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD [\"fastkafka\", \"run\", \"--num-workers\", \"2\", \"--kafka-broker\", \"production\", \"application:kafka_app\"]\n```\n\n1.  Start from the official Python base image.\n\n2.  Set the current working directory to `/project`.\n\n    This is where we’ll put the `requirements.txt` file and the\n    `application.py` file.\n\n3.  Copy the `application.py` file and `requirements.txt` file inside\n    the `/project` directory.\n\n4.  Install the package dependencies in the requirements file.\n\n    The `--no-cache-dir` option tells `pip` to not save the downloaded\n    packages locally, as that is only if `pip` was going to be run again\n    to install the same packages, but that’s not the case when working\n    with containers.\n\n    The `--upgrade` option tells `pip` to upgrade the packages if they\n    are already installed.\n\n5.  Set the **command** to run the `fastkafka run` command.\n\n    `CMD` takes a list of strings, each of these strings is what you\n    would type in the command line separated by spaces.\n\n    This command will be run from the **current working directory**, the\n    same `/project` directory you set above with `WORKDIR /project`.\n\n    We supply additional parameters `--num-workers` and `--kafka-broker`\n    for the run command. Finally, we specify the location of our\n    `fastkafka` application location as a command argument.\n\n    To learn more about `fastkafka run` command please check the [CLI\n    docs](../../cli/fastkafka/#fastkafka-run).\n\n### Build the Docker Image\n\nNow that all the files are in place, let’s build the container image.\n\n1.  
Go to the project directory (where your `Dockerfile` is, containing\n    your `application.py` file).\n\n2.  Run the following command to build the image:\n\n    ``` cmd\n    docker build -t fastkafka_project_image .\n    ```\n\n    This command will create a docker image with the name\n    `fastkafka_project_image` and the `latest` tag.\n\nThat’s it! You have now built a docker image for your FastKafka project.\n\n### Start the Docker Container\n\nRun a container based on the built image:\n\n``` cmd\ndocker run -d --name fastkafka_project_container fastkafka_project_image\n```\n\n## Additional Security\n\n`Trivy` is an open-source tool that scans Docker images for\nvulnerabilities. It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here’s\nhow you can use `trivy` to scan your `fastkafka_project_image`:\n\n1.  Install `trivy` on your local machine by following the instructions\n    provided in the [official `trivy`\n    documentation](https://aquasecurity.github.io/trivy/latest/getting-started/installation/).\n\n2.  Run the following command to scan your fastkafka_project_image:\n\n    ``` cmd\n    trivy image fastkafka_project_image\n    ```\n\n    This command will scan your `fastkafka_project_image` for any\n    vulnerabilities and provide you with a report of its findings.\n\n3.  Fix any vulnerabilities identified by `trivy`. You can do this by\n    updating the vulnerable package to a more secure version or by using\n    a different package altogether.\n\n4.  
Rebuild your `fastkafka_project_image` and repeat steps 2 and 3\n    until `trivy` reports no vulnerabilities.\n\nBy using `trivy` to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities.\n\n## Example repo\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n[here](https://github.com/airtai/sample_fastkafka_project/)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",
    "content": "# Using Redpanda to test FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## What is FastKafka?\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## What is Redpanda?\n\nRedpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda.\n\nFrom [redpanda.com](https://redpanda.com/):\n\n> Redpanda is a Kafka®-compatible streaming data platform that is proven\n> to be 10x faster and 6x lower in total costs. It is also JVM-free,\n> ZooKeeper®-free, Jepsen-tested and source available.\n\nSome of the advantages of Redpanda over Kafka are\n\n1.  A single binary with built-in everything, no ZooKeeper® or JVM\n    needed.\n2.  Costs upto 6X less than Kafka.\n3.  
Up to 10x lower average latencies and up to 6x faster Kafka\n    transactions without compromising correctness.\n\nTo learn more about Redpanda, please visit their\n[website](https://redpanda.com/) or checkout this [blog\npost](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\ncomparing Redpanda and Kafka’s performance benchmarks.\n\n## Example repo\n\nA sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n[here](https://github.com/airtai/sample_fastkafka_with_redpanda).\n\n## The process\n\nHere are the steps we’ll be walking through to build our example:\n\n1.  Set up the prerequisites.\n2.  Clone the example repo.\n3.  Explain how to write an application using FastKafka.\n4.  Explain how to write a test case to test FastKafka with Redpanda.\n5.  Run the test case and produce/consume messages.\n\n## 1. Prerequisites\n\nBefore starting, make sure you have the following prerequisites set up:\n\n1.  **Python 3.x**: A Python 3.x installation is required to run\n    FastKafka. You can download the latest version of Python from the\n    [official website](https://www.python.org/downloads/). You’ll also\n    need to have pip installed and updated, which is Python’s package\n    installer.\n2.  **Docker Desktop**: Docker is used to run Redpanda, which is\n    required for testing FastKafka. You can download and install Docker\n    Desktop from the [official\n    website](https://www.docker.com/products/docker-desktop/).\n3.  **Git**: You’ll need to have Git installed to clone the example\n    repo. You can download Git from the [official\n    website](https://git-scm.com/downloads).\n\n## 2. 
Cloning and setting up the example repo\n\nTo get started with the example code, clone the [GitHub\nrepository](https://github.com/airtai/sample_fastkafka_with_redpanda) by\nrunning the following command in your terminal:\n\n``` cmd\ngit clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n```\n\nThis will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files.\n\n### Create a virtual environment\n\nBefore writing any code, let’s [create a new virtual\nenvironment](https://docs.python.org/3/library/venv.html#module-venv)\nfor our project.\n\nA virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between different projects.\n\nTo create a new virtual environment, run the following commands in your\nterminal:\n\n``` cmd\npython3 -m venv venv\n```\n\nThis will create a new directory called `venv` in your project\ndirectory, which will contain the virtual environment.\n\nTo activate the virtual environment, run the following command:\n\n``` cmd\nsource venv/bin/activate\n```\n\nThis will change your shell’s prompt to indicate that you are now\nworking inside the virtual environment.\n\nFinally, run the following command to upgrade `pip`, the Python package\ninstaller:\n\n``` cmd\npip install --upgrade pip\n```\n\n### Install Python dependencies\n\nNext, let’s install the required Python dependencies. In this guide,\nwe’ll be using\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nto write our application code and `pytest` and `pytest-asyncio` to test\nit.\n\nYou can install the dependencies from the `requirements.txt` file\nprovided in the cloned repository by running:\n\n``` cmd\npip install -r requirements.txt\n```\n\nThis will install all the required packages and their dependencies.\n\n## 3. 
Writing server code\n\nThe `application.py` file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. 
It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/usage/types/#constrained-types),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker.\n\nNext, an instance of the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum required arguments:\n\n- `kafka_brokers`: a dictionary used for generating documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## 4. Writing the test code\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstance which can be configured to start a [Redpanda\nbroker](../../api/fastkafka/testing/LocalRedpandaBroker/) for testing\npurposes. 
The `test.py` file in the cloned repository contains the\nfollowing code for testing.\n\n``` python\nimport pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n    # Start Tester app and create local Redpanda broker for testing\n    async with Tester(kafka_app).using_local_redpanda(\n        tag=\"v23.1.2\", listener_port=9092\n    ) as tester:\n        # Send IrisInputData message to input_data topic\n        await tester.to_input_data(msg)\n\n        # Assert that the kafka_app responded with IrisPrediction in predictions topic\n        await tester.awaited_mocks.on_predictions.assert_awaited_with(\n            IrisPrediction(species=\"setosa\"), timeout=2\n        )\n```\n\nThe\n[`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\nmodule utilizes uses\n[`LocalRedpandaBroker`](../api/fastkafka/testing/LocalRedpandaBroker.md/#fastkafka.testing.LocalRedpandaBroker)\nto start and stop a Redpanda broker for testing purposes using Docker\n\n## 5. Running the tests\n\nWe can run the tests which is in `test.py` file by executing the\nfollowing command:\n\n``` cmd\npytest test.py\n```\n\nThis will start a Redpanda broker using Docker and executes tests. The\noutput of the command is:\n\n``` cmd\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item                                                                 \n\ntest.py .                                                                  
[100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n```\n\nRunning the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable.\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napplication. The app will consume the `IrisInputData` from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our\n    [`Tester`](../api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\n    class with `Redpanda` broker which mirrors the developed app topics\n    for testing purposes\n\n3.  Sent `IrisInputData` message to `input_data` topic\n\n4.  Asserted and checked that the developed iris classification service\n    has reacted to `IrisInputData` message\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",
    "content": "# Using FastAPI to Run FastKafka Application\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nWhen deploying a FastKafka application, the default approach is to\nutilize the [`fastkafka run`](/docs/cli/fastkafka#fastkafka-run) CLI\ncommand. This command allows you to launch your FastKafka application as\na standalone service. However, if you already have a FastAPI application\nin place and wish to run FastKafka application alongside it, you have an\nalternative option.\n\nFastKafka provides a method called\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md/#fastkafka._application.app.FastKafka.fastapi_lifespan)\nthat leverages [FastAPI’s\nlifespan](https://fastapi.tiangolo.com/advanced/events/#lifespan-events)\nfeature. This method allows you to run your FastKafka application\ntogether with your existing FastAPI app, seamlessly integrating their\nfunctionalities. By using the\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md/#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod, you can start the FastKafka application within the same process\nas the FastAPI app.\n\nThe\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md/#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod ensures that both FastAPI and FastKafka are initialized and start\nworking simultaneously. This approach enables the execution of\nKafka-related tasks, such as producing and consuming messages, while\nalso handling HTTP requests through FastAPI’s routes.\n\nBy combining FastAPI and FastKafka in this manner, you can build a\ncomprehensive application that harnesses the power of both frameworks.\nWhether you require real-time messaging capabilities or traditional HTTP\nendpoints, this approach allows you to leverage the strengths of FastAPI\nand FastKafka within a single deployment setup.\n\n## Prerequisites\n\n1.  
A basic knowledge of\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    is needed to proceed with this guide. If you are not familiar with\n    [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka),\n    please go through the [tutorial](/docs#tutorial) first.\n2.  [`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\n    and `FastAPI` libraries needs to be installed.\n\nThis guide will provide a step-by-step explanation, taking you through\neach stage individually, before combining all the components in the\nfinal section for a comprehensive understanding of the process.\n\n## 1. Basic FastKafka app\n\nIn this step, we will begin by creating a simple FastKafka application.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Greetings\",\n    kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n    await to_greetings(TestMsg(msg=f\"Hello {msg.msg}\"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n    return greeting\n```\n\nIn the above example, we consume messages from a topic called `names`,\nwe prepend “Hello\" to the message, and send it back to another topic\ncalled `greetings`.\n\nWe now have a simple\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napp to produce and consume from two topics.\n\n## 2. 
Using fastapi_lifespan method\n\nIn this step of the guide, we will explore the integration of a\nFastKafka application with a FastAPI application using the\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md/#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod. The\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md/#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod is a feature provided by FastKafka, which allows you to\nseamlessly integrate a FastKafka application with a FastAPI application\nby leveraging FastAPI’s lifespan feature.\n\n``` python\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name=\"localhost\"))\n\n\n@fastapi_app.get(\"/hello\")\nasync def hello():\n    return {\"msg\": \"hello there\"}\n```\n\nIn the above example, a new instance of the `FastAPI` app is created,\nand when the app is started using uvicorn, it also runs the\n[`FastKafka`](../api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\napplication concurrently.\n\n## Putting it all together\n\nLet’s put the above code together and write it in a file called\n`fast_apps.py`.\n\n``` python\n# content of the \"fast_apps.py\" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Greetings\",\n    kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n    await to_greetings(TestMsg(msg=f\"Hello 
{msg.msg}\"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n    return greeting\n\n\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(\"localhost\"))\n\n@fastapi_app.get(\"/hello\")\nasync def hello():\n    return {\"msg\": \"hello there\"}\n```\n\nFinally, you can run the FastAPI application using a web server of your\nchoice, such as Uvicorn or Hypercorn by running the below command:\n\n``` cmd\nuvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\n```\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/index.md",
    "content": "# FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n<b>Effortless Kafka integration for your web services</b>\n\n------------------------------------------------------------------------\n\n![PyPI](https://img.shields.io/pypi/v/fastkafka.png) ![PyPI -\nDownloads](https://img.shields.io/pypi/dm/fastkafka.png) ![PyPI - Python\nVersion](https://img.shields.io/pypi/pyversions/fastkafka.png)\n\n![GitHub Workflow\nStatus](https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml)\n![CodeQL](https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg)\n![Dependency\nReview](https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg)\n\n![GitHub](https://img.shields.io/github/license/airtai/fastkafka.png)\n\n------------------------------------------------------------------------\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n------------------------------------------------------------------------\n\n#### ⭐⭐⭐ Stay in touch ⭐⭐⭐\n\nPlease show your support and stay in touch by:\n\n- giving our [GitHub repository](https://github.com/airtai/fastkafka/) a\n  star, and\n\n- joining our [Discord server](https://discord.gg/CJWmYpyFbc).\n\nYour support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. Thank you for your\nsupport!\n\n------------------------------------------------------------------------\n\n#### 🐝🐝🐝 We were busy lately 🐝🐝🐝\n\n![Activity](https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg \"Repobeats analytics image\")\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of `fastkafka` with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\nTo install fastkafka with testing features please use:\n\n``` sh\npip install fastkafka[test]\n```\n\nTo install fastkafka with asyncapi docs please use:\n\n``` sh\npip install fastkafka[docs]\n```\n\nTo install fastkafka with all the features please use:\n\n``` sh\npip install fastkafka[test,docs]\n```\n\n## Tutorial\n\nYou can start an interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb\" target=\"_blank\">\n<img 
src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open in Colab\" />\n</a>\n\n## Writing server code\n\nTo demonstrate FastKafka simplicity of using `@produces` and `@consumes`\ndecorators, we will focus on a simple app.\n\nThe app will consume jsons containig positive floats from one topic, log\nthem and then produce incremented values to another topic.\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines one `Data` mesage class. This Class will model the\nconsumed and produced data in our app demo, it contains one\n`NonNegativeFloat` field `data` that will be logged and “processed\"\nbefore being produced to another topic.\n\nThese message class will be used to parse and validate incoming data in\nKafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker.\n\nNext, an object of the\n[`FastKafka`](./api/fastkafka/FastKafka.md/#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\nWe will also import and create a logger so that we can log the incoming\ndata in our consuming function.\n\n``` python\nfrom logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger(\"Demo Kafka app\")\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThe following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `Data` message class. Specifying the type of the\n  single argument is instructing Pydantic to use `Data.parse_raw()`\n  on the consumed message before passing it to the user defined function\n  `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_output_data` function,\n  which specifies that this function should produce a message to the\n  “output_data\" Kafka topic whenever it is called. The `to_output_data`\n  function takes a single float argument `data`. It increments the\n  data and returns it wrapped in a `Data` object. 
The framework will call\n  the `Data.json().encode(\"utf-8\")` function on the returned value and\n  produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](./api/fastkafka/testing/Tester.md/#fastkafka.testing.Tester)\ninstances which internally starts InMemory implementation of Kafka\nbroker.\n\nThe Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies.\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = Data(\n    data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send Data message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with incremented data in output_data topic\n    await tester.awaited_mocks.on_output_data.assert_awaited_with(\n        Data(data=1.1), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : 
created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] Demo Kafka app: Got data: 0.1\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created a simple fastkafka application. The app will consume the\n`Data` from the `input_data` topic, log it and produce the incremented\ndata to `output_data` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent Data message to `input_data` topic\n\n4.  
Asserted and checked that the developed service has reacted to Data\n    message\n\n## Running the service\n\nThe service can be started using builtin fastkafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\nTo run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp symbol to the command.\n\n``` sh\nfastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n```\n\nAfter running the command, you should see the following output in your\ncommand line:\n\n    [1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting 
bootstrap_servers value to 'localhost:9092'\n    [1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1506]: 23-05-31 11:36:46.042 [INFO] 
aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n    [1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n    [1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n    23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nAsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programmatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n    23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\nThis will generate the *asyncapi* folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxr-xr-x 4 root root 4096 May 31 11:38 docs\n    drwxr-xr-x 2 root root 4096 May 31 11:38 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find an asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET / HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/global.min.css HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /js/asyncapi-ui.min.js HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/asyncapi.min.css HTTP/1.1\" 200 -\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation below:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    
kafka_brokers=kafka_brokers,\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet is for the consumer as specified in\nthe code above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippet is for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n\n## License\n\nFastKafka is licensed under the Apache License 2.0\n\nA permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code.\n\nThe full text of the license can be found\n[here](https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/overrides/css/extra.css",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/overrides/js/extra.js",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/overrides/js/math.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.7.1/overrides/js/mathjax.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/CHANGELOG.md",
    "content": "# Release notes\n\n<!-- do not remove -->\n\n## 0.7.0\n\n### New Features\n\n- Optional description argument to consumes and produces decorator implemented ([#338](https://github.com/airtai/fastkafka/pull/338)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumes and produces decorators now have optional `description` argument that is used instead of function docstring in async doc generation when specified\n\n- FastKafka Windows OS support enabled ([#326](https://github.com/airtai/fastkafka/pull/326)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now run on Windows\n\n- FastKafka and FastAPI integration implemented ([#304](https://github.com/airtai/fastkafka/pull/304)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n  - FastKafka can now be run alongside FastAPI\n\n- Batch consuming option to consumers implemented ([#298](https://github.com/airtai/fastkafka/pull/298)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Consumers can consume events in batches by specifying msg type of consuming function as `List[YourMsgType]` \n\n- Removed support for synchronous produce functions ([#295](https://github.com/airtai/fastkafka/pull/295)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Added default broker values and update docs ([#292](https://github.com/airtai/fastkafka/pull/292)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n### Bugs Squashed\n\n- Fix index.ipynb to be runnable in colab ([#342](https://github.com/airtai/fastkafka/issues/342))\n\n- Use cli option root_path docs generate and serve CLI commands ([#341](https://github.com/airtai/fastkafka/pull/341)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Fix incorrect asyncapi docs path on fastkafka docs serve command ([#335](https://github.com/airtai/fastkafka/pull/335)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Serve docs now takes app `root_path` argument into consideration when specified 
in app\n\n- Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps ([#315](https://github.com/airtai/fastkafka/issues/315))\n\n- Fix logs printing timestamps ([#308](https://github.com/airtai/fastkafka/issues/308))\n\n- Fix topics with dots causing failure of tester instantiation ([#306](https://github.com/airtai/fastkafka/pull/306)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Specified topics can now have \".\" in their names\n\n## 0.6.0\n\n### New Features\n\n- Timestamps added to CLI commands ([#283](https://github.com/airtai/fastkafka/pull/283)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Added option to process messages concurrently ([#278](https://github.com/airtai/fastkafka/pull/278)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - A new `executor` option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.\n\n- Add consumes and produces functions to app ([#274](https://github.com/airtai/fastkafka/pull/274)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Add batching for producers ([#273](https://github.com/airtai/fastkafka/pull/273)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(batch): batch support is a real need! and i see it on the issue list.... 
so hope we do not need to wait too long\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Fix broken links in guides ([#272](https://github.com/airtai/fastkafka/pull/272)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Generate the docusaurus sidebar dynamically by parsing summary.md ([#270](https://github.com/airtai/fastkafka/pull/270)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Metadata passed to consumer ([#269](https://github.com/airtai/fastkafka/pull/269)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - requirement(key): read the key value somehow..Maybe I missed something in the docs\n    requirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD operations.\n\n    https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063\n\n- Contribution with instructions how to build and test added ([#255](https://github.com/airtai/fastkafka/pull/255)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n\n- Export encoders, decoders from fastkafka.encoder ([#246](https://github.com/airtai/fastkafka/pull/246)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n- Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. 
([#239](https://github.com/airtai/fastkafka/issues/239))\n\n\n- UI Improvement: Post screenshots with links to the actual messages in testimonials section ([#228](https://github.com/airtai/fastkafka/issues/228))\n\n### Bugs Squashed\n\n- Batch testing fix ([#280](https://github.com/airtai/fastkafka/pull/280)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Tester breaks when using Batching or KafkaEvent producers ([#279](https://github.com/airtai/fastkafka/issues/279))\n\n- Consumer loop callbacks are not executing in parallel ([#276](https://github.com/airtai/fastkafka/issues/276))\n\n\n## 0.5.0\n\n### New Features\n\n- Significant speedup of Kafka producer ([#236](https://github.com/airtai/fastkafka/pull/236)), thanks to [@Sternakt](https://github.com/Sternakt)\n \n\n- Added support for AVRO encoding/decoding ([#231](https://github.com/airtai/fastkafka/pull/231)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n### Bugs Squashed\n\n- Fixed sidebar to include guides in docusaurus documentation ([#238](https://github.com/airtai/fastkafka/pull/238)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Fixed link to symbols in docusaurus docs ([#227](https://github.com/airtai/fastkafka/pull/227)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Removed bootstrap servers from constructor ([#220](https://github.com/airtai/fastkafka/pull/220)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n\n## 0.4.0\n\n### New Features\n\n- Integrate FastKafka chat ([#208](https://github.com/airtai/fastkafka/pull/208)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add benchmarking ([#206](https://github.com/airtai/fastkafka/pull/206)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Enable fast testing without running kafka locally ([#198](https://github.com/airtai/fastkafka/pull/198)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Generate docs using Docusaurus 
([#194](https://github.com/airtai/fastkafka/pull/194)), thanks to [@harishmohanraj](https://github.com/harishmohanraj)\n\n- Add test cases for LocalRedpandaBroker ([#189](https://github.com/airtai/fastkafka/pull/189)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Reimplement patch and delegates from fastcore ([#188](https://github.com/airtai/fastkafka/pull/188)), thanks to [@Sternakt](https://github.com/Sternakt)\n\n- Rename existing functions into start and stop and add lifespan handler ([#117](https://github.com/airtai/fastkafka/issues/117))\n  - https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios\n\n\n## 0.3.1\n\n-  README.md file updated\n\n\n## 0.3.0\n\n### New Features\n\n- Guide for FastKafka produces using partition key ([#172](https://github.com/airtai/fastkafka/pull/172)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #161\n\n- Add support for Redpanda for testing and deployment ([#181](https://github.com/airtai/fastkafka/pull/181)), thanks to [@kumaranvpl](https://github.com/kumaranvpl)\n\n- Remove bootstrap_servers from __init__ and use the name of broker as an option when running/testing ([#134](https://github.com/airtai/fastkafka/issues/134))\n\n- Add a GH action file to check for broken links in the docs ([#163](https://github.com/airtai/fastkafka/issues/163))\n\n- Optimize requirements for testing and docs ([#151](https://github.com/airtai/fastkafka/issues/151))\n\n- Break requirements into base and optional for testing and dev ([#124](https://github.com/airtai/fastkafka/issues/124))\n  - Minimize base requirements needed just for running the service.\n\n- Add link to example git repo into guide for building docs using actions ([#81](https://github.com/airtai/fastkafka/issues/81))\n\n- Add logging for run_in_background ([#46](https://github.com/airtai/fastkafka/issues/46))\n\n- Implement partition Key mechanism for producers 
([#16](https://github.com/airtai/fastkafka/issues/16))\n\n### Bugs Squashed\n\n- Implement checks for npm installation and version ([#176](https://github.com/airtai/fastkafka/pull/176)), thanks to [@Sternakt](https://github.com/Sternakt)\n  - Closes #158 by checking if the npx is installed and more verbose error handling\n\n- Fix the helper.py link in CHANGELOG.md ([#165](https://github.com/airtai/fastkafka/issues/165))\n\n- fastkafka docs install_deps fails ([#157](https://github.com/airtai/fastkafka/issues/157))\n  - Unexpected internal error: [Errno 2] No such file or directory: 'npx'\n\n- Broken links in docs ([#141](https://github.com/airtai/fastkafka/issues/141))\n\n- fastkafka run is not showing up in CLI docs ([#132](https://github.com/airtai/fastkafka/issues/132))\n\n\n## 0.2.3\n\n- Fixed broken links on PyPi index page\n\n\n## 0.2.2\n\n### New Features\n\n- Extract JDK and Kafka installation out of LocalKafkaBroker ([#131](https://github.com/airtai/fastkafka/issues/131))\n\n- PyYAML version relaxed ([#119](https://github.com/airtai/fastkafka/pull/119)), thanks to [@davorrunje](https://github.com/davorrunje)\n\n- Replace docker based kafka with local ([#68](https://github.com/airtai/fastkafka/issues/68))\n  - [x] replace docker compose with a simple docker run (standard run_jupyter.sh should do)\n  - [x] replace all tests to use LocalKafkaBroker\n  - [x] update documentation\n\n### Bugs Squashed\n\n- Fix broken link for FastKafka docs in index notebook ([#145](https://github.com/airtai/fastkafka/issues/145))\n\n- Fix encoding issues when loading setup.py on windows OS ([#135](https://github.com/airtai/fastkafka/issues/135))\n\n\n## 0.2.0\n\n### New Features\n\n- Replace kafka container with LocalKafkaBroker ([#112](https://github.com/airtai/fastkafka/issues/112))\n  - - [x] Replace kafka container with LocalKafkaBroker in tests\n- [x] Remove kafka container from tests environment\n- [x] Fix failing tests\n\n### Bugs Squashed\n\n- Fix random failing in CI 
([#109](https://github.com/airtai/fastkafka/issues/109))\n\n\n## 0.1.3\n\n- version update in __init__.py\n\n\n## 0.1.2\n\n### New Features\n\n\n- Git workflow action for publishing Kafka docs ([#78](https://github.com/airtai/fastkafka/issues/78))\n\n\n### Bugs Squashed\n\n- Include missing requirement ([#110](https://github.com/airtai/fastkafka/issues/110))\n  - [x] Typer is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py) but it is not included in [settings.ini](https://github.com/airtai/fastkafka/blob/main/settings.ini)\n  - [x] Add aiohttp which is imported in this [file](https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py)\n  - [x] Add nbformat which is imported in _components/helpers.py\n  - [x] Add nbconvert which is imported in _components/helpers.py\n\n\n## 0.1.1\n\n\n### Bugs Squashed\n\n- JDK install fails on Python 3.8 ([#106](https://github.com/airtai/fastkafka/issues/106))\n\n\n\n## 0.1.0\n\nInitial release\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/CNAME",
    "content": "fastkafka.airt.ai\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/CONTRIBUTING.md",
    "content": "# Contributing to FastKafka\n\nFirst off, thanks for taking the time to contribute! ❤️\n\nAll types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉\n\n> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:\n> - Star the project\n> - Tweet about it\n> - Refer this project in your project's readme\n> - Mention the project at local meetups and tell your friends/colleagues\n\n## Table of Contents\n\n- [I Have a Question](#i-have-a-question)\n- [I Want To Contribute](#i-want-to-contribute)\n  - [Reporting Bugs](#reporting-bugs)\n  - [Suggesting Enhancements](#suggesting-enhancements)\n  - [Your First Code Contribution](#your-first-code-contribution)\n- [Development](#development)\n    - [Prepare the dev environment](#prepare-the-dev-environment)\n    - [Way of working](#way-of-working)\n    - [Before a PR](#before-a-pr)\n\n\n\n## I Have a Question\n\n> If you want to ask a question, we assume that you have read the available [Documentation](https://fastkafka.airt.ai/docs).\n\nBefore you ask a question, it is best to search for existing [Issues](https://github.com/airtai/fastkafka/issues) that might help you. 
In case you have found a suitable issue and still need clarification, you can write your question in this issue.\n\nIf you then still feel the need to ask a question and need clarification, we recommend the following:\n\n- Contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new)\n    - Provide as much context as you can about what you're running into\n\nWe will then take care of the issue as soon as possible.\n\n## I Want To Contribute\n\n> ### Legal Notice \n> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.\n\n### Reporting Bugs\n\n#### Before Submitting a Bug Report\n\nA good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.\n\n- Make sure that you are using the latest version.\n- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](https://fastkafka.airt.ai/docs). 
If you are looking for support, you might want to check [this section](#i-have-a-question)).\n- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/airtai/fastkafka/issues?q=label%3Abug).\n- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.\n- Collect information about the bug:\n  - Stack trace (Traceback)\n  - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)\n  - Python version\n  - Possibly your input and the output\n  - Can you reliably reproduce the issue? And can you also reproduce it with older versions?\n\n#### How Do I Submit a Good Bug Report?\n\nWe use GitHub issues to track bugs and errors. If you run into an issue with the project:\n\n- Open an [Issue](https://github.com/airtai/fastkafka/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)\n- Explain the behavior you would expect and the actual behavior.\n- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.\n- Provide the information you collected in the previous section.\n\nOnce it's filed:\n\n- The project team will label the issue accordingly.\n- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. 
Bugs with the `needs-repro` tag will not be addressed until they are reproduced.\n- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented.\n\n### Suggesting Enhancements\n\nThis section guides you through submitting an enhancement suggestion for FastKafka, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.\n\n#### Before Submitting an Enhancement\n\n- Make sure that you are using the latest version.\n- Read the [documentation](https://fastkafka.airt.ai/docs) carefully and find out if the functionality is already covered, maybe by an individual configuration.\n- Perform a [search](https://github.com/airtai/fastkafka/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.\n- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library.\n- If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on [Discord](https://discord.com/invite/CJWmYpyFbc)\n\n#### How Do I Submit a Good Enhancement Suggestion?\n\nEnhancement suggestions are tracked as [GitHub issues](https://github.com/airtai/fastkafka/issues).\n\n- Use a **clear and descriptive title** for the issue to identify the suggestion.\n- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.\n- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.\n- **Explain why this enhancement would be useful** to most FastKafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.\n\n### Your First Code Contribution\n\nA great way to start contributing to FastKafka would be by solving an issue tagged with \"good first issue\". To find a list of issues that are tagged as \"good first issue\" and are suitable for newcomers, please visit the following link: [Good first issues](https://github.com/airtai/fastkafka/labels/good%20first%20issue)\n\nThese issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in [Way of working](#way-of-working) and [Before a PR](#before-a-pr), and help us make FastKafka even better!\n\nIf you have any questions or need further assistance, feel free to reach out to us. 
Happy coding!\n\n## Development\n\n### Prepare the dev environment\n\nTo start contributing to FastKafka, you first have to prepare the development environment.\n\n#### Clone the FastKafka repository\n\nTo clone the repository, run the following command in the CLI:\n\n```shell\ngit clone https://github.com/airtai/fastkafka.git\n```\n\n#### Optional: create a virtual python environment\n\nTo prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your FastKafka project by running:\n\n```shell\npython3 -m venv fastkafka-env\n```\n\nAnd to activate your virtual environment run:\n\n```shell\nsource fastkafka-env/bin/activate\n```\n\nTo learn more about virtual environments, please have a look at [official python documentation](https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available.)\n\n#### Install FastKafka\n\nTo install FastKafka, navigate to the root directory of the cloned FastKafka project and run:\n\n```shell\npip install -e \".[dev]\"\n```\n\n#### Install JRE and Kafka toolkit\n\nTo be able to run tests and use all the functionalities of FastKafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:\n\n1. Use our `fastkafka testing install-deps` CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR\n2. 
Install npm manually.\n   To do this, please refer to [NPM installation guide](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)\n   \n#### Install docusaurus\n\nTo generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of FastKafka project.\n\n#### Check if everything works\n\nAfter installing FastKafka and all the necessary dependencies, run `nbdev_test` in the root of FastKafka project. This will take a couple of minutes as it will run all the tests on FastKafka project. If everything is set up correctly, you will get a \"Success.\" message in your terminal, otherwise please refer to previous steps.\n\n### Way of working\n\nThe development of FastKafka is done in Jupyter notebooks. Inside the `nbs` directory you will find all the source code of FastKafka, this is where you will implement your changes.\n\nThe testing, cleanup and exporting of the code is being handled by `nbdev`, please, before starting the work on FastKafka, get familiar with it by reading [nbdev documentation](https://nbdev.fast.ai/getting_started.html).\n\nThe general philosophy you should follow when writing code for FastKafka is:\n\n- Function should be an atomic functionality, short and concise\n   - Good rule of thumb: your function should be 5-10 lines long usually\n- If there are more than 2 params, enforce keywording using *\n   - E.g.: `def function(param1, *, param2, param3): ...`\n- Define typing of arguments and return value\n   - If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected\n- After the function cell, write test cells using the assert keyword\n   - Whenever you implement something you should test that functionality immediately in the cells below \n- Add Google style python docstrings when function is implemented and tested\n\n### Before a PR\n\nAfter you have implemented your changes you will want to open a pull request to merge those changes into our main 
branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of FastKafka project):\n\n1. Format your notebooks: `nbqa black nbs`\n2. Close, shutdown, and clean the metadata from your notebooks: `nbdev_clean`\n3. Export your code: `nbdev_export`\n4. Run the tests: `nbdev_test`\n5. Test code typing: `mypy fastkafka`\n6. Test code safety with bandit: `bandit -r fastkafka`\n7. Test code safety with semgrep: `semgrep --config auto -r fastkafka`\n\nWhen you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.\n\n## Attribution\nThis guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/LICENSE.md",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/EventMetadata.md",
    "content": "## fastkafka.EventMetadata {#fastkafka.EventMetadata}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/aiokafka_consumer_loop.py#L27-L77\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n\nA class for encapsulating Kafka record metadata.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `topic` | `str` | The topic this record is received from | *required* |\n| `partition` | `int` | The partition from which this record is received | *required* |\n| `offset` | `int` | The position of this record in the corresponding Kafka partition | *required* |\n| `timestamp` | `int` | The timestamp of this record | *required* |\n| `timestamp_type` | `int` | The timestamp type of this record | *required* |\n| `key` | `Optional[bytes]` | The key (or `None` if no key is specified) | *required* |\n| `value` | `Optional[bytes]` | The value | *required* |\n| `serialized_key_size` | `int` | The size of the serialized, uncompressed key in bytes | *required* |\n| `serialized_value_size` | `int` | The size of the serialized, uncompressed value in bytes | *required* |\n| `headers` | `Sequence[Tuple[str, bytes]]` | The headers | *required* |\n\n### create_event_metadata {#fastkafka._components.aiokafka_consumer_loop.EventMetadata.create_event_metadata}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/aiokafka_consumer_loop.py#L56-L77\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@staticmethod\ncreate_event_metadata(\n    record\n)\n```\n\nCreates an instance of EventMetadata from a ConsumerRecord.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `record` | `ConsumerRecord` | The Kafka ConsumerRecord. | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `EventMetadata` | The created EventMetadata instance. |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/FastKafka.md",
    "content": "### __init__ {#fastkafka._application.app.FastKafka.init}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L179-L305\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n__init__(\n    self,\n    title=None,\n    description=None,\n    version=None,\n    contact=None,\n    kafka_brokers=None,\n    root_path=None,\n    lifespan=None,\n    bootstrap_servers_id='localhost',\n    loop=None,\n    client_id=None,\n    metadata_max_age_ms=300000,\n    request_timeout_ms=40000,\n    api_version='auto',\n    acks=<object object at 0x7ff10d5f9100>,\n    key_serializer=None,\n    value_serializer=None,\n    compression_type=None,\n    max_batch_size=16384,\n    partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,\n    max_request_size=1048576,\n    linger_ms=0,\n    send_backoff_ms=100,\n    retry_backoff_ms=100,\n    security_protocol='PLAINTEXT',\n    ssl_context=None,\n    connections_max_idle_ms=540000,\n    enable_idempotence=False,\n    transactional_id=None,\n    transaction_timeout_ms=60000,\n    sasl_mechanism='PLAIN',\n    sasl_plain_password=None,\n    sasl_plain_username=None,\n    sasl_kerberos_service_name='kafka',\n    sasl_kerberos_domain_name=None,\n    sasl_oauth_token_provider=None,\n    group_id=None,\n    key_deserializer=None,\n    value_deserializer=None,\n    fetch_max_wait_ms=500,\n    fetch_max_bytes=52428800,\n    fetch_min_bytes=1,\n    max_partition_fetch_bytes=1048576,\n    auto_offset_reset='latest',\n    enable_auto_commit=True,\n    auto_commit_interval_ms=5000,\n    check_crcs=True,\n    partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n    max_poll_interval_ms=300000,\n    rebalance_timeout_ms=None,\n    session_timeout_ms=10000,\n    heartbeat_interval_ms=3000,\n    consumer_timeout_ms=200,\n    max_poll_records=None,\n    exclude_internal_topics=True,\n    
isolation_level='read_uncommitted',\n)\n```\n\nCreates FastKafka application\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `title` | `Optional[str]` | optional title for the documentation. If None,the title will be set to empty string | `None` |\n| `description` | `Optional[str]` | optional description for the documentation. IfNone, the description will be set to empty string | `None` |\n| `version` | `Optional[str]` | optional version for the documentation. If None,the version will be set to empty string | `None` |\n| `contact` | `Optional[Dict[str, str]]` | optional contact for the documentation. If None, thecontact will be set to placeholder values:name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com' | `None` |\n| `kafka_brokers` | `Optional[Dict[str, Any]]` | dictionary describing kafka brokers used for settingthe bootstrap server when running the applicationa and forgenerating documentation. Defaults to    {        \"localhost\": {            \"url\": \"localhost\",            \"description\": \"local kafka broker\",            \"port\": \"9092\",        }    } | `None` |\n| `root_path` | `Union[pathlib.Path, str, NoneType]` | path to where documentation will be created | `None` |\n| `lifespan` | `Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]]` | asynccontextmanager that is used for setting lifespan hooks.__aenter__ is called before app start and __aexit__ after app stop.The lifespan is called whe application is started as async contextmanager, e.g.:`async with kafka_app...` | `None` |\n| `client_id` |  | a name for this client. 
This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ``aiokafka-producer-#`` (appended with a unique numberper instance) | `None` |\n| `key_serializer` |  | used to convert user-supplied keys to bytesIf not :data:`None`, called as ``f(key),`` should return:class:`bytes`.Default: :data:`None`. | `None` |\n| `value_serializer` |  | used to convert user-supplied messagevalues to :class:`bytes`. If not :data:`None`, called as``f(value)``, should return :class:`bytes`.Default: :data:`None`. | `None` |\n| `acks` |  | one of ``0``, ``1``, ``all``. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:* ``0``: Producer will not wait for any acknowledgment from the server  at all. The message will immediately be added to the socket  buffer and considered sent. No guarantee can be made that the  server has received the record in this case, and the retries  configuration will not take effect (as the client won't  generally know of any failures). The offset given back for each  record will always be set to -1.* ``1``: The broker leader will write the record to its local log but  will respond without awaiting full acknowledgement from all  followers. In this case should the leader fail immediately  after acknowledging the record but before the followers have  replicated it then the record will be lost.* ``all``: The broker leader will wait for the full set of in-sync  replicas to acknowledge the record. This guarantees that the  record will not be lost as long as at least one in-sync replica  remains alive. This is the strongest available guarantee.If unset, defaults to ``acks=1``. 
If `enable_idempotence` is:data:`True` defaults to ``acks=all`` | `<object object at 0x7ff10d5f9100>` |\n| `compression_type` |  | The compression type for all data generated bythe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``or :data:`None`.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:`None`. | `None` |\n| `max_batch_size` |  | Maximum size of buffered data per partition.After this amount :meth:`send` coroutine will block until batch isdrained.Default: 16384 | `16384` |\n| `linger_ms` |  | The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than `linger_ms`, producer will wait ``linger_ms - process_time``.Default: 0 (i.e. no delay). | `0` |\n| `partitioner` |  | Callable used to determine which partitioneach message is assigned to. Called (after key serialization):``partitioner(key_bytes, all_partitions, available_partitions)``.The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:`None`, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible). | `<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>` |\n| `max_request_size` |  | The maximum size of a request. This is alsoeffectively a cap on the maximum record size. 
Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576. | `1048576` |\n| `metadata_max_age_ms` |  | The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000 | `300000` |\n| `request_timeout_ms` |  | Produce request timeout in milliseconds.As it's sent as part of:class:`~kafka.protocol.produce.ProduceRequest` (it's a blockingcall), maximum waiting time can be up to ``2 *request_timeout_ms``.Default: 40000. | `40000` |\n| `retry_backoff_ms` |  | Milliseconds to backoff when retrying onerrors. Default: 100. | `100` |\n| `api_version` |  | specify which kafka API version to use.If set to ``auto``, will attempt to infer the broker version byprobing various APIs. Default: ``auto`` | `'auto'` |\n| `security_protocol` |  | Protocol used to communicate with brokers.Valid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,``SASL_SSL``. Default: ``PLAINTEXT``. | `'PLAINTEXT'` |\n| `ssl_context` |  | pre-configured :class:`~ssl.SSLContext`for wrapping socket connections. Directly passed into asyncio's:meth:`~asyncio.loop.create_connection`. For moreinformation see :ref:`ssl_auth`.Default: :data:`None` | `None` |\n| `connections_max_idle_ms` |  | Close idle connections after the numberof milliseconds specified by this config. Specifying :data:`None` willdisable idle checks. Default: 540000 (9 minutes). | `540000` |\n| `enable_idempotence` |  | When set to :data:`True`, the producer willensure that exactly one copy of each message is written in thestream. If :data:`False`, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ``all``. 
If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:`ValueError` will be thrown.New in version 0.5.0. | `False` |\n| `sasl_mechanism` |  | Authentication mechanism when security_protocolis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid valuesare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,``OAUTHBEARER``.Default: ``PLAIN`` | `'PLAIN'` |\n| `sasl_plain_username` |  | username for SASL ``PLAIN`` authentication.Default: :data:`None` | `None` |\n| `sasl_plain_password` |  | password for SASL ``PLAIN`` authentication.Default: :data:`None` | `None` |\n| `group_id` |  | name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None | `None` |\n| `key_deserializer` |  | Any callable that takes araw message key and returns a deserialized key. | `None` |\n| `value_deserializer` |  | Any callable that takes araw message value and returns a deserialized value. | `None` |\n| `fetch_min_bytes` |  | Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to`fetch_max_wait_ms` for more data to accumulate. Default: 1. | `1` |\n| `fetch_max_bytes` |  | The maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb). 
| `52428800` |\n| `fetch_max_wait_ms` |  | The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500. | `500` |\n| `max_partition_fetch_bytes` |  | The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ``= #partitions * max_partition_fetch_bytes``.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576. | `1048576` |\n| `max_poll_records` |  | The maximum number of records returned in asingle call to :meth:`.getmany`. Defaults ``None``, no limit. | `None` |\n| `auto_offset_reset` |  | A policy for resetting offsets on:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldestavailable message, ``latest`` will move to the most recent, and``none`` will raise an exception so you can handle this case.Default: ``latest``. | `'latest'` |\n| `enable_auto_commit` |  | If true the consumer's offset will beperiodically committed in the background. Default: True. | `True` |\n| `auto_commit_interval_ms` |  | milliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000. | `5000` |\n| `check_crcs` |  | Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True | `True` |\n| `partition_assignment_strategy` |  | List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. 
When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:`.RoundRobinPartitionAssignor`] | `(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)` |\n| `max_poll_interval_ms` |  | Maximum allowed time between calls toconsume messages (e.g., :meth:`.getmany`). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See `KIP-62`_ for moreinformation. Default 300000 | `300000` |\n| `rebalance_timeout_ms` |  | The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to `max.poll.interval.ms` configuration,but as ``aiokafka`` will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:`.ConsumerRebalanceListener` to delay rebalancing. Defaultsto ``session_timeout_ms`` | `None` |\n| `session_timeout_ms` |  | Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(`heartbeat.interval.ms`) to indicate its liveness to the broker.If no heartbeats are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe **broker** configuration properties`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.Default: 10000 | `10000` |\n| `heartbeat_interval_ms` |  | The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. 
Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than `session_timeout_ms`, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 3000 | `3000` |\n| `consumer_timeout_ms` |  | maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200 | `200` |\n| `exclude_internal_topics` |  | Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True | `True` |\n| `isolation_level` |  | Controls how to read messages writtentransactionally.If set to ``read_committed``, :meth:`.getmany` will only returntransactional messages which have been committed.If set to ``read_uncommitted`` (the default), :meth:`.getmany` willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, in`read_committed` mode, :meth:`.getmany` will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, `read_committed` consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in `read_committed` the seek_to_end method willreturn the LSO. See method docs below. Default: ``read_uncommitted`` | `'read_uncommitted'` |\n| `sasl_oauth_token_provider` |  | OAuthBearer token provider instance. 
(See :mod:`kafka.oauth.abstract`).Default: None | `None` |\n\n### benchmark {#fastkafka._application.app.FastKafka.benchmark}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1108-L1159\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nbenchmark(\n    self, interval=1, sliding_window_size=None\n)\n```\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `interval` | `Union[int, datetime.timedelta]` | Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second | `1` |\n| `sliding_window_size` | `Optional[int]` | The size of the sliding window to use to calculateaverage throughput. default: None - By default average throughput isnot calculated | `None` |\n\n### consumes {#fastkafka._application.app.FastKafka.consumes}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L474-L557\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nconsumes(\n    self,\n    topic=None,\n    decoder='json',\n    executor=None,\n    brokers=None,\n    prefix='on_',\n    description=None,\n    loop=None,\n    bootstrap_servers='localhost',\n    client_id='aiokafka-0.8.1',\n    group_id=None,\n    key_deserializer=None,\n    value_deserializer=None,\n    fetch_max_wait_ms=500,\n    fetch_max_bytes=52428800,\n    fetch_min_bytes=1,\n    max_partition_fetch_bytes=1048576,\n    request_timeout_ms=40000,\n    retry_backoff_ms=100,\n    auto_offset_reset='latest',\n    enable_auto_commit=True,\n    auto_commit_interval_ms=5000,\n    check_crcs=True,\n    metadata_max_age_ms=300000,\n    partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n    max_poll_interval_ms=300000,\n    rebalance_timeout_ms=None,\n    
session_timeout_ms=10000,\n    heartbeat_interval_ms=3000,\n    consumer_timeout_ms=200,\n    max_poll_records=None,\n    ssl_context=None,\n    security_protocol='PLAINTEXT',\n    api_version='auto',\n    exclude_internal_topics=True,\n    connections_max_idle_ms=540000,\n    isolation_level='read_uncommitted',\n    sasl_mechanism='PLAIN',\n    sasl_plain_password=None,\n    sasl_plain_username=None,\n    sasl_kerberos_service_name='kafka',\n    sasl_kerberos_domain_name=None,\n    sasl_oauth_token_provider=None,\n)\n```\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specification and documentation.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `topic` | `Optional[str]` | Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefix | `None` |\n| `decoder` | `Union[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]` | Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. It also accepts custom decoder function. | `'json'` |\n| `executor` | `Union[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]` | Type of executor to choose for consuming tasks. Available optionsare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\"SequentialExecutor\" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use\"DynamicTaskExecutor\" which will wrap the consuming functions into tasksand run them on an asyncio loop in the background. 
This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking. | `None` |\n| `prefix` | `str` | Prefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: \"on_\". If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError | `'on_'` |\n| `brokers` | `Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]` | Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka. | `None` |\n| `description` | `Optional[str]` | Optional description of the consuming function async docs.If not provided, consuming function __doc__ attr will be used. | `None` |\n| `bootstrap_servers` |  | a ``host[:port]`` string (or list of``host[:port]`` strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. If no servers arespecified, will default to ``localhost:9092``. | `'localhost'` |\n| `client_id` |  | a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`for logging with respect to consumer group administration. Default:``aiokafka-{version}`` | `'aiokafka-0.8.1'` |\n| `group_id` |  | name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None | `None` |\n| `key_deserializer` |  | Any callable that takes araw message key and returns a deserialized key. 
| `None` |\n| `value_deserializer` |  | Any callable that takes araw message value and returns a deserialized value. | `None` |\n| `fetch_min_bytes` |  | Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to`fetch_max_wait_ms` for more data to accumulate. Default: 1. | `1` |\n| `fetch_max_bytes` |  | The maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb). | `52428800` |\n| `fetch_max_wait_ms` |  | The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500. | `500` |\n| `max_partition_fetch_bytes` |  | The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ``= #partitions * max_partition_fetch_bytes``.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576. | `1048576` |\n| `max_poll_records` |  | The maximum number of records returned in asingle call to :meth:`.getmany`. Defaults ``None``, no limit. | `None` |\n| `request_timeout_ms` |  | Client request timeout in milliseconds.Default: 40000. | `40000` |\n| `retry_backoff_ms` |  | Milliseconds to backoff when retrying onerrors. Default: 100. 
| `100` |\n| `auto_offset_reset` |  | A policy for resetting offsets on:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldestavailable message, ``latest`` will move to the most recent, and``none`` will raise an exception so you can handle this case.Default: ``latest``. | `'latest'` |\n| `enable_auto_commit` |  | If true the consumer's offset will beperiodically committed in the background. Default: True. | `True` |\n| `auto_commit_interval_ms` |  | milliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000. | `5000` |\n| `check_crcs` |  | Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True | `True` |\n| `metadata_max_age_ms` |  | The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000 | `300000` |\n| `partition_assignment_strategy` |  | List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:`.RoundRobinPartitionAssignor`] | `(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)` |\n| `max_poll_interval_ms` |  | Maximum allowed time between calls toconsume messages (e.g., :meth:`.getmany`). 
If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See `KIP-62`_ for moreinformation. Default 300000 | `300000` |\n| `rebalance_timeout_ms` |  | The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to `max.poll.interval.ms` configuration,but as ``aiokafka`` will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:`.ConsumerRebalanceListener` to delay rebalancing. Defaultsto ``session_timeout_ms`` | `None` |\n| `session_timeout_ms` |  | Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(`heartbeat.interval.ms`) to indicate its liveness to the broker.If no heartbeats are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe **broker** configuration properties`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.Default: 10000 | `10000` |\n| `heartbeat_interval_ms` |  | The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than `session_timeout_ms`, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 3000 | `3000` |\n| `consumer_timeout_ms` |  | maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. 
Default: 200 | `200` |\n| `api_version` |  | specify which kafka API version to use.:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.If set to ``auto``, will attempt to infer the broker version byprobing various APIs. Default: ``auto`` | `'auto'` |\n| `security_protocol` |  | Protocol used to communicate with brokers.Valid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,``SASL_SSL``. Default: ``PLAINTEXT``. | `'PLAINTEXT'` |\n| `ssl_context` |  | pre-configured :class:`~ssl.SSLContext`for wrapping socket connections. Directly passed into asyncio's:meth:`~asyncio.loop.create_connection`. For more information see:ref:`ssl_auth`. Default: None. | `None` |\n| `exclude_internal_topics` |  | Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True | `True` |\n| `connections_max_idle_ms` |  | Close idle connections after the numberof milliseconds specified by this config. Specifying `None` willdisable idle checks. Default: 540000 (9 minutes). | `540000` |\n| `isolation_level` |  | Controls how to read messages writtentransactionally.If set to ``read_committed``, :meth:`.getmany` will only returntransactional messages which have been committed.If set to ``read_uncommitted`` (the default), :meth:`.getmany` willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, in`read_committed` mode, :meth:`.getmany` will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. 
In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, `read_committed` consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in `read_committed` the seek_to_end method willreturn the LSO. See method docs below. Default: ``read_uncommitted`` | `'read_uncommitted'` |\n| `sasl_mechanism` |  | Authentication mechanism when security_protocolis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,``OAUTHBEARER``.Default: ``PLAIN`` | `'PLAIN'` |\n| `sasl_plain_username` |  | username for SASL ``PLAIN`` authentication.Default: None | `None` |\n| `sasl_plain_password` |  | password for SASL ``PLAIN`` authentication.Default: None | `None` |\n| `sasl_oauth_token_provider` |  | OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).Default: None | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], 
Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]]` | : A function returning the same function |\n\n### create_docs {#fastkafka._application.app.FastKafka.create_docs}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L938-L964\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\ncreate_docs(\n    self\n)\n```\n\nCreate the asyncapi documentation based on the configured consumers and producers.\n\nThis function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers.\n\nNote:\n    The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\n    attribute of the FastKafka instance.\n\n### create_mocks {#fastkafka._application.app.FastKafka.create_mocks}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1026-L1104\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\ncreate_mocks(\n    self\n)\n```\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### fastapi_lifespan {#fastkafka._application.app.FastKafka.fastapi_lifespan}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1163-L1182\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nfastapi_lifespan(\n    self, kafka_broker_name\n)\n```\n\nMethod for managing the lifespan of a FastAPI application with a specific Kafka broker.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `kafka_broker_name` | `str` | The name of the Kafka broker to start FastKafka | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]` | Lifespan function to use for initializing FastAPI 
|\n\n### get_topics {#fastkafka._application.app.FastKafka.get_topics}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L663-L672\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nget_topics(\n    self\n)\n```\n\nGet all topics for both producing and consuming.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Iterable[str]` | A set of topics for both producing and consuming. |\n\n### is_started {#fastkafka._application.app.FastKafka.is_started}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L308-L319\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@property\nis_started(\n    self\n)\n```\n\nProperty indicating whether the FastKafka object is started.\n\nThe is_started property indicates if the FastKafka object is currently\nin a started state. This implies that all background tasks, producers,\nand consumers have been initiated, and the object is successfully connected\nto the Kafka broker.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `bool` | True if the object is started, False otherwise. 
|\n\n### produces {#fastkafka._application.app.FastKafka.produces}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L582-L659\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nproduces(\n    self,\n    topic=None,\n    encoder='json',\n    prefix='to_',\n    brokers=None,\n    description=None,\n    loop=None,\n    bootstrap_servers='localhost',\n    client_id=None,\n    metadata_max_age_ms=300000,\n    request_timeout_ms=40000,\n    api_version='auto',\n    acks=<object object at 0x7ff10d5f9100>,\n    key_serializer=None,\n    value_serializer=None,\n    compression_type=None,\n    max_batch_size=16384,\n    partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,\n    max_request_size=1048576,\n    linger_ms=0,\n    send_backoff_ms=100,\n    retry_backoff_ms=100,\n    security_protocol='PLAINTEXT',\n    ssl_context=None,\n    connections_max_idle_ms=540000,\n    enable_idempotence=False,\n    transactional_id=None,\n    transaction_timeout_ms=60000,\n    sasl_mechanism='PLAIN',\n    sasl_plain_password=None,\n    sasl_plain_username=None,\n    sasl_kerberos_service_name='kafka',\n    sasl_kerberos_domain_name=None,\n    sasl_oauth_token_provider=None,\n)\n```\n\nDecorator registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specification and documentation.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `topic` | `Optional[str]` | Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix. 
| `None` |\n| `encoder` | `Union[str, Callable[[pydantic.main.BaseModel], bytes]]` | Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. It also accepts custom encoder function. | `'json'` |\n| `prefix` | `str` | Prefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: \"to_\". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError | `'to_'` |\n| `brokers` | `Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]` | Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka. | `None` |\n| `description` | `Optional[str]` | Optional description of the producing function async docs.If not provided, producing function __doc__ attr will be used. | `None` |\n| `bootstrap_servers` |  | a ``host[:port]`` string or list of``host[:port]`` strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list.  It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to ``localhost:9092``. | `'localhost'` |\n| `client_id` |  | a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ``aiokafka-producer-#`` (appended with a unique numberper instance) | `None` |\n| `key_serializer` |  | used to convert user-supplied keys to bytesIf not :data:`None`, called as ``f(key),`` should return:class:`bytes`.Default: :data:`None`. | `None` |\n| `value_serializer` |  | used to convert user-supplied messagevalues to :class:`bytes`. 
If not :data:`None`, called as``f(value)``, should return :class:`bytes`.Default: :data:`None`. | `None` |\n| `acks` |  | one of ``0``, ``1``, ``all``. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:* ``0``: Producer will not wait for any acknowledgment from the server  at all. The message will immediately be added to the socket  buffer and considered sent. No guarantee can be made that the  server has received the record in this case, and the retries  configuration will not take effect (as the client won't  generally know of any failures). The offset given back for each  record will always be set to -1.* ``1``: The broker leader will write the record to its local log but  will respond without awaiting full acknowledgement from all  followers. In this case should the leader fail immediately  after acknowledging the record but before the followers have  replicated it then the record will be lost.* ``all``: The broker leader will wait for the full set of in-sync  replicas to acknowledge the record. This guarantees that the  record will not be lost as long as at least one in-sync replica  remains alive. This is the strongest available guarantee.If unset, defaults to ``acks=1``. If `enable_idempotence` is:data:`True` defaults to ``acks=all`` | `<object object at 0x7ff10d5f9100>` |\n| `compression_type` |  | The compression type for all data generated bythe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``or :data:`None`.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:`None`. 
| `None` |\n| `max_batch_size` |  | Maximum size of buffered data per partition.After this amount :meth:`send` coroutine will block until batch isdrained.Default: 16384 | `16384` |\n| `linger_ms` |  | The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than `linger_ms`, producer will wait ``linger_ms - process_time``.Default: 0 (i.e. no delay). | `0` |\n| `partitioner` |  | Callable used to determine which partitioneach message is assigned to. Called (after key serialization):``partitioner(key_bytes, all_partitions, available_partitions)``.The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:`None`, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible). | `<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>` |\n| `max_request_size` |  | The maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576. | `1048576` |\n| `metadata_max_age_ms` |  | The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. 
Default: 300000 | `300000` |\n| `request_timeout_ms` |  | Produce request timeout in milliseconds.As it's sent as part of:class:`~kafka.protocol.produce.ProduceRequest` (it's a blockingcall), maximum waiting time can be up to ``2 *request_timeout_ms``.Default: 40000. | `40000` |\n| `retry_backoff_ms` |  | Milliseconds to backoff when retrying onerrors. Default: 100. | `100` |\n| `api_version` |  | specify which kafka API version to use.If set to ``auto``, will attempt to infer the broker version byprobing various APIs. Default: ``auto`` | `'auto'` |\n| `security_protocol` |  | Protocol used to communicate with brokers.Valid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,``SASL_SSL``. Default: ``PLAINTEXT``. | `'PLAINTEXT'` |\n| `ssl_context` |  | pre-configured :class:`~ssl.SSLContext`for wrapping socket connections. Directly passed into asyncio's:meth:`~asyncio.loop.create_connection`. For moreinformation see :ref:`ssl_auth`.Default: :data:`None` | `None` |\n| `connections_max_idle_ms` |  | Close idle connections after the numberof milliseconds specified by this config. Specifying :data:`None` willdisable idle checks. Default: 540000 (9 minutes). | `540000` |\n| `enable_idempotence` |  | When set to :data:`True`, the producer willensure that exactly one copy of each message is written in thestream. If :data:`False`, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ``all``. If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:`ValueError` will be thrown.New in version 0.5.0. | `False` |\n| `sasl_mechanism` |  | Authentication mechanism when security_protocolis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. 
Valid valuesare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,``OAUTHBEARER``.Default: ``PLAIN`` | `'PLAIN'` |\n| `sasl_plain_username` |  | username for SASL ``PLAIN`` authentication.Default: :data:`None` | `None` |\n| `sasl_plain_password` |  | password for SASL ``PLAIN`` authentication.Default: :data:`None` | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]` | : A function returning the same function |\n\n**Exceptions**:\n\n|  Type | Description |\n|---|---|\n| `ValueError` | when needed |\n\n### run_in_background {#fastkafka._application.app.FastKafka.run_in_background}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L676-L709\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nrun_in_background(\n    self\n)\n```\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]` | A decorator function that takes a background task as an input and stores it to be 
run in the background. |\n\n### set_kafka_broker {#fastkafka._application.app.FastKafka.set_kafka_broker}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L321-L337\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nset_kafka_broker(\n    self, kafka_broker_name\n)\n```\n\nSets the Kafka broker to start FastKafka with\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `kafka_broker_name` | `str` | The name of the Kafka broker to start FastKafka | *required* |\n\n**Exceptions**:\n\n|  Type | Description |\n|---|---|\n| `ValueError` | If the provided kafka_broker_name is not found in dictionary of kafka_brokers |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/KafkaEvent.md",
    "content": "## fastkafka.KafkaEvent {#fastkafka.KafkaEvent}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/producer_decorator.py#L36-L46\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n\nA generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `message` | `BaseSubmodel` | The message contained in the Kafka event, can be of type pydantic.BaseModel. | *required* |\n| `key` | `Optional[bytes]` | The optional key used to identify the Kafka event. | `None` |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/encoder/AvroBase.md",
    "content": "## fastkafka.encoder.AvroBase {#fastkafka.encoder.AvroBase}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L22-L235\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n\nThis is base pydantic class that will add some methods\n\n### __init__ {#pydantic.main.BaseModel.init}\n\n\n\n```py\n__init__(\n    __pydantic_self__, data\n)\n```\n\nCreate a new model by parsing and validating input data from keyword arguments.\n\nRaises ValidationError if the input data cannot be parsed to form a valid model.\n\nUses `__pydantic_self__` instead of the more common `self` for the first arg to\nallow `self` as a field name.\n\n### avro_schema {#fastkafka._components.encoder.avro.AvroBase.avro_schema}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L80-L99\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@classmethod\navro_schema(\n    by_alias=True, namespace=None\n)\n```\n\nReturns the Avro schema for the Pydantic class.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `by_alias` | `bool` | Generate schemas using aliases defined. Defaults to True. | `True` |\n| `namespace` | `Optional[str]` | Optional namespace string for schema generation. | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Dict[str, Any]` | The Avro schema for the model. 
|\n\n### avro_schema_for_pydantic_class {#fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_class}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L53-L77\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@classmethod\navro_schema_for_pydantic_class(\n    pydantic_model, by_alias=True, namespace=None\n)\n```\n\nReturns the Avro schema for the given Pydantic class.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `pydantic_model` | `Type[pydantic.main.BaseModel]` | The Pydantic class. | *required* |\n| `by_alias` | `bool` | Generate schemas using aliases defined. Defaults to True. | `True` |\n| `namespace` | `Optional[str]` | Optional namespace string for schema generation. | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Dict[str, Any]` | The Avro schema for the model. |\n\n### avro_schema_for_pydantic_object {#fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_object}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L26-L50\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@classmethod\navro_schema_for_pydantic_object(\n    pydantic_model, by_alias=True, namespace=None\n)\n```\n\nReturns the Avro schema for the given Pydantic object.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `pydantic_model` | `BaseModel` | The Pydantic object. | *required* |\n| `by_alias` | `bool` | Generate schemas using aliases defined. Defaults to True. | `True` |\n| `namespace` | `Optional[str]` | Optional namespace string for schema generation. | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Dict[str, Any]` | The Avro schema for the model. 
|\n\n### copy {#pydantic.main.BaseModel.copy}\n\n\n\n```py\ncopy(\n    self, include=None, exclude=None, update=None, deep=False\n)\n```\n\nReturns a copy of the model.\n\nThis method is now deprecated; use `model_copy` instead. If you need `include` or `exclude`, use:\n\n```py\ndata = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n```\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `include` | AbstractSetIntStr | MappingIntStrAny | None | Optional set or mappingspecifying which fields to include in the copied model. | `None` |\n| `exclude` | AbstractSetIntStr | MappingIntStrAny | None | Optional set or mappingspecifying which fields to exclude in the copied model. | `None` |\n| `update` | `Dict[str, Any] | None` | Optional dictionary of field-value pairs to override field valuesin the copied model. | `None` |\n| `deep` | bool | If True, the values of fields that are Pydantic models will be deep copied. | `False` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Model` | A copy of the model with included, excluded and updated fields as specified. |\n\n### model_computed_fields {#pydantic.main.BaseModel.model_computed_fields}\n\n\n\n```py\n@property\nmodel_computed_fields(\n    self\n)\n```\n\nGet the computed fields of this model instance.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `dict[str, ComputedFieldInfo]` | A dictionary of computed field names and their corresponding `ComputedFieldInfo` objects. 
|\n\n### model_construct {#pydantic.main.BaseModel.model_construct}\n\n\n\n```py\n@classmethod\nmodel_construct(\n    _fields_set=None, values\n)\n```\n\nCreates a new instance of the `Model` class with validated data.\n\nCreates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data.\nDefault values are respected, but no other validation is performed.\nBehaves as if `Config.extra = 'allow'` was set since it adds all passed values\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `_fields_set` | set[str] | None | The set of field names accepted for the Model instance. | `None` |\n| `values` | Any | Trusted or pre-validated data dictionary. | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Model` | A new instance of the `Model` class with validated data. |\n\n### model_copy {#pydantic.main.BaseModel.model_copy}\n\n\n\n```py\nmodel_copy(\n    self, update=None, deep=False\n)\n```\n\nReturns a copy of the model.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `update` | dict[str, Any] | None | Values to change/add in the new model. Note: the data is not validatedbefore creating the new model. You should trust this data. | `None` |\n| `deep` | bool | Set to `True` to make a deep copy of the model. | `False` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Model` | New model instance. 
|\n\n### model_dump {#pydantic.main.BaseModel.model_dump}\n\n\n\n```py\nmodel_dump(\n    self,\n    mode='python',\n    include=None,\n    exclude=None,\n    by_alias=False,\n    exclude_unset=False,\n    exclude_defaults=False,\n    exclude_none=False,\n    round_trip=False,\n    warnings=True,\n)\n```\n\nUsage docs: https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump\n\nGenerate a dictionary representation of the model, optionally specifying which fields to include or exclude.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `mode` | Literal['json', 'python'] | str | The mode in which `to_python` should run.If mode is 'json', the dictionary will only contain JSON serializable types.If mode is 'python', the dictionary may contain any Python objects. | `'python'` |\n| `include` | IncEx | A list of fields to include in the output. | `None` |\n| `exclude` | IncEx | A list of fields to exclude from the output. | `None` |\n| `by_alias` | bool | Whether to use the field's alias in the dictionary key if defined. | `False` |\n| `exclude_unset` | bool | Whether to exclude fields that are unset or None from the output. | `False` |\n| `exclude_defaults` | bool | Whether to exclude fields that are set to their default value from the output. | `False` |\n| `exclude_none` | bool | Whether to exclude fields that have a value of `None` from the output. | `False` |\n| `round_trip` | bool | Whether to enable serialization and deserialization round-trip support. | `False` |\n| `warnings` | bool | Whether to log warnings when invalid fields are encountered. | `True` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `dict[str, Any]` | A dictionary representation of the model. 
|\n\n### model_dump_json {#pydantic.main.BaseModel.model_dump_json}\n\n\n\n```py\nmodel_dump_json(\n    self,\n    indent=None,\n    include=None,\n    exclude=None,\n    by_alias=False,\n    exclude_unset=False,\n    exclude_defaults=False,\n    exclude_none=False,\n    round_trip=False,\n    warnings=True,\n)\n```\n\nUsage docs: https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump_json\n\nGenerates a JSON representation of the model using Pydantic's `to_json` method.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `indent` | int | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | `None` |\n| `include` | IncEx | Field(s) to include in the JSON output. Can take either a string or set of strings. | `None` |\n| `exclude` | IncEx | Field(s) to exclude from the JSON output. Can take either a string or set of strings. | `None` |\n| `by_alias` | bool | Whether to serialize using field aliases. | `False` |\n| `exclude_unset` | bool | Whether to exclude fields that have not been explicitly set. | `False` |\n| `exclude_defaults` | bool | Whether to exclude fields that have the default value. | `False` |\n| `exclude_none` | bool | Whether to exclude fields that have a value of `None`. | `False` |\n| `round_trip` | bool | Whether to use serialization/deserialization between JSON and class instance. | `False` |\n| `warnings` | bool | Whether to show any warnings that occurred during serialization. | `True` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `str` | A JSON string representation of the model. |\n\n### model_extra {#pydantic.main.BaseModel.model_extra}\n\n\n\n```py\n@property\nmodel_extra(\n    self\n)\n```\n\nGet extra fields set during validation.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `dict[str, Any] | None` | A dictionary of extra fields, or `None` if `config.extra` is not set to `\"allow\"`. 
|\n\n### model_fields_set {#pydantic.main.BaseModel.model_fields_set}\n\n\n\n```py\n@property\nmodel_fields_set(\n    self\n)\n```\n\nReturns the set of fields that have been set on this model instance.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `set[str]` | A set of strings representing the fields that have been set,i.e. that were not filled from defaults. |\n\n### model_json_schema {#pydantic.main.BaseModel.model_json_schema}\n\n\n\n```py\n@classmethod\nmodel_json_schema(\n    by_alias=True,\n    ref_template='#/$defs/{model}',\n    schema_generator=<class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode='validation',\n)\n```\n\nGenerates a JSON schema for a model class.\n\nTo override the logic used to generate the JSON schema, you can create a subclass of `GenerateJsonSchema`\nwith your desired modifications, then override this method on a custom base class and set the default\nvalue of `schema_generator` to be your subclass.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `by_alias` | bool | Whether to use attribute aliases or not. | `True` |\n| `ref_template` | str | The reference template. | `'#/$defs/{model}'` |\n| `schema_generator` | type[GenerateJsonSchema] | The JSON schema generator. | `<class 'pydantic.json_schema.GenerateJsonSchema'>` |\n| `mode` | JsonSchemaMode | The mode in which to generate the schema. | `'validation'` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `dict[str, Any]` | The JSON schema for the given model class. |\n\n### model_parametrized_name {#pydantic.main.BaseModel.model_parametrized_name}\n\n\n\n```py\n@classmethod\nmodel_parametrized_name(\n    params\n)\n```\n\nCompute the class name for parametrizations of generic classes.\n\nThis method can be overridden to achieve a custom naming scheme for generic BaseModels.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `params` | tuple[type[Any], ...] 
| Tuple of types of the class. Given a generic class`Model` with 2 type variables and a concrete model `Model[str, int]`,the value `(str, int)` would be passed to `params`. | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `str` | String representing the new class where `params` are passed to `cls` as type variables. |\n\n**Exceptions**:\n\n|  Type | Description |\n|---|---|\n| `TypeError` | Raised when trying to generate concrete names for non-generic models. |\n\n### model_post_init {#pydantic.main.BaseModel.model_post_init}\n\n\n\n```py\nmodel_post_init(\n    self, _BaseModel__context\n)\n```\n\nOverride this method to perform additional initialization after `__init__` and `model_construct`.\n\nThis is useful if you want to do some validation that requires the entire model to be initialized.\n\n### model_rebuild {#pydantic.main.BaseModel.model_rebuild}\n\n\n\n```py\n@classmethod\nmodel_rebuild(\n    force=False,\n    raise_errors=True,\n    _parent_namespace_depth=2,\n    _types_namespace=None,\n)\n```\n\nTry to rebuild the pydantic-core schema for the model.\n\nThis may be necessary when one of the annotations is a ForwardRef which could not be resolved during\nthe initial attempt to build the schema, and automatic rebuilding fails.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `force` | bool | Whether to force the rebuilding of the model schema, defaults to `False`. | `False` |\n| `raise_errors` | bool | Whether to raise errors, defaults to `True`. | `True` |\n| `_parent_namespace_depth` | int | The depth level of the parent namespace, defaults to 2. | `2` |\n| `_types_namespace` | dict[str, Any] | None | The types namespace, defaults to `None`. 
| `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `bool | None` | Returns `None` if the schema is already \"complete\" and rebuilding was not required.If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. |\n\n### model_validate {#pydantic.main.BaseModel.model_validate}\n\n\n\n```py\n@classmethod\nmodel_validate(\n    obj, strict=None, from_attributes=None, context=None\n)\n```\n\nValidate a pydantic model instance.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `obj` | Any | The object to validate. | *required* |\n| `strict` | bool | None | Whether to raise an exception on invalid fields. | `None` |\n| `from_attributes` | bool | None | Whether to extract data from object attributes. | `None` |\n| `context` | dict[str, Any] | None | Additional context to pass to the validator. | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Model` | The validated model instance. |\n\n**Exceptions**:\n\n|  Type | Description |\n|---|---|\n| `ValidationError` | If the object could not be validated. |\n\n### model_validate_json {#pydantic.main.BaseModel.model_validate_json}\n\n\n\n```py\n@classmethod\nmodel_validate_json(\n    json_data, strict=None, context=None\n)\n```\n\nValidate the given JSON data against the Pydantic model.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `json_data` | str | bytes | bytearray | The JSON data to validate. | *required* |\n| `strict` | bool | None | Whether to enforce types strictly. | `None` |\n| `context` | dict[str, Any] | None | Extra variables to pass to the validator. | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Model` | The validated Pydantic model. |\n\n**Exceptions**:\n\n|  Type | Description |\n|---|---|\n| `ValueError` | If `json_data` is not a JSON string. |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/encoder/avro_decoder.md",
    "content": "### avro_decoder {#fastkafka.encoder.avro_decoder}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L263-L279\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\navro_decoder(\n    raw_msg, cls\n)\n```\n\nDecoder to decode avro encoded messages to pydantic model instance\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `raw_msg` | `bytes` | Avro encoded bytes message received from Kafka topic | *required* |\n| `cls` | `Type[pydantic.main.BaseModel]` | Pydantic class; This pydantic class will be used to construct instance of same class | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Any` | An instance of given pydantic class |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/encoder/avro_encoder.md",
    "content": "### avro_encoder {#fastkafka.encoder.avro_encoder}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L239-L259\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\navro_encoder(\n    msg\n)\n```\n\nEncoder to encode pydantic instances to avro message\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `msg` | `BaseModel` | An instance of pydantic basemodel | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `bytes` | A bytes message which is encoded from pydantic basemodel |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/encoder/avsc_to_pydantic.md",
    "content": "### avsc_to_pydantic {#fastkafka.encoder.avsc_to_pydantic}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L283-L403\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\navsc_to_pydantic(\n    schema\n)\n```\n\nGenerate pydantic model from given Avro Schema\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `schema` | `Dict[str, Any]` | Avro schema in dictionary format | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Type[pydantic.main.BaseModel]` | Pydantic model class built from given avro schema |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/encoder/json_decoder.md",
    "content": "### json_decoder {#fastkafka.encoder.json_decoder}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/json.py#L42-L55\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\njson_decoder(\n    raw_msg, cls\n)\n```\n\nDecoder to decode json string in bytes to pydantic model instance\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `raw_msg` | `bytes` | Bytes message received from Kafka topic | *required* |\n| `cls` | `Type[pydantic.main.BaseModel]` | Pydantic class; This pydantic class will be used to construct instance of same class | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Any` | An instance of given pydantic class |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/encoder/json_encoder.md",
    "content": "### json_encoder {#fastkafka.encoder.json_encoder}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/json.py#L28-L38\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\njson_encoder(\n    msg\n)\n```\n\nEncoder to encode pydantic instances to json string\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `msg` | `BaseModel` | An instance of pydantic basemodel | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `bytes` | Json string in bytes which is encoded from pydantic basemodel |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/executors/DynamicTaskExecutor.md",
    "content": "## fastkafka.executors.DynamicTaskExecutor {#fastkafka.executors.DynamicTaskExecutor}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L207-L272\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n\nA class that implements a dynamic task executor for processing consumer records.\n\nThe DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running a tasks in parallel using asyncio.Task.\n\n### __init__ {#fastkafka._components.task_streaming.DynamicTaskExecutor.init}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L214-L237\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n__init__(\n    self, throw_exceptions=False, max_buffer_size=100000, size=100000\n)\n```\n\nCreate an instance of DynamicTaskExecutor\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `throw_exceptions` | `bool` | Flag indicating whether exceptions should be thrown ot logged.Defaults to False. | `False` |\n| `max_buffer_size` | `int` | Maximum buffer size for the memory object stream.Defaults to 100_000. | `100000` |\n| `size` | `int` | Size of the task pool. Defaults to 100_000. | `100000` |\n\n### run {#fastkafka._components.task_streaming.DynamicTaskExecutor.run}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L239-L272\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nrun(\n    self, is_shutting_down_f, generator, processor\n)\n```\n\nRuns the dynamic task executor.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `is_shutting_down_f` | `Callable[[], bool]` | Function to check if the executor is shutting down. | *required* |\n| `generator` | `Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]]` | Generator function for retrieving consumer records. 
| *required* |\n| `processor` | `Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]` | Processor function for processing consumer records. | *required* |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/executors/SequentialExecutor.md",
    "content": "## fastkafka.executors.SequentialExecutor {#fastkafka.executors.SequentialExecutor}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L305-L356\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n\nA class that implements a sequential executor for processing consumer records.\n\nThe SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines.\n\n### __init__ {#fastkafka._components.task_streaming.SequentialExecutor.init}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L312-L326\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n__init__(\n    self, throw_exceptions=False, max_buffer_size=100000\n)\n```\n\nCreate an instance of SequentialExecutor\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `throw_exceptions` | `bool` | Flag indicating whether exceptions should be thrown or logged.Defaults to False. | `False` |\n| `max_buffer_size` | `int` | Maximum buffer size for the memory object stream.Defaults to 100_000. | `100000` |\n\n### run {#fastkafka._components.task_streaming.SequentialExecutor.run}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L328-L356\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nrun(\n    self, is_shutting_down_f, generator, processor\n)\n```\n\nRuns the sequential executor.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `is_shutting_down_f` | `Callable[[], bool]` | Function to check if the executor is shutting down. | *required* |\n| `generator` | `Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]]` | Generator function for retrieving consumer records. 
| *required* |\n| `processor` | `Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]` | Processor function for processing consumer records. | *required* |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/testing/ApacheKafkaBroker.md",
    "content": "## fastkafka.testing.ApacheKafkaBroker {#fastkafka.testing.ApacheKafkaBroker}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L168-L305\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n\nApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.\n\n### __init__ {#fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.init}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L173-L209\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n__init__(\n    self,\n    topics=[],\n    retries=3,\n    apply_nest_asyncio=False,\n    zookeeper_port=2181,\n    listener_port=9092,\n)\n```\n\nInitialises the ApacheKafkaBroker object\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `topics` | `Iterable[str]` | List of topics to create after sucessfull Kafka broker startup | `[]` |\n| `retries` | `int` | Number of retries to create kafka and zookeeper services using random | `3` |\n| `apply_nest_asyncio` | `bool` | set to True if running in notebook | `False` |\n| `zookeeper_port` | `int` | Port for clients (Kafka brokes) to connect | `2181` |\n| `listener_port` | `int` | Port on which the clients (producers and consumers) can connect | `9092` |\n\n### get_service_config_string {#fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.get_service_config_string}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L459-L475\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nget_service_config_string(\n    self, service, data_dir\n)\n```\n\nGets the configuration string for a service.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `service` | `str` | Name of the service (\"kafka\" or \"zookeeper\"). 
| *required* |\n| `data_dir` | `Path` | Path to the directory where the service will save data. | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `str` | The service configuration string. |\n\n### is_started {#fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.is_started}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L212-L222\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@property\nis_started(\n    self\n)\n```\n\nProperty indicating whether the ApacheKafkaBroker object is started.\n\nThe is_started property indicates if the ApacheKafkaBroker object is currently\nin a started state. This implies that Zookeeper and Kafka broker processes have\nsuccessfully started and are ready for handling events.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `bool` | True if the object is started, False otherwise. |\n\n### start {#fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.start}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L624-L664\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nstart(\n    self\n)\n```\n\nStarts a local Kafka broker and ZooKeeper instance synchronously.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `str` | The Kafka broker bootstrap server address in string format: host:port. |\n\n### stop {#fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.stop}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L668-L680\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nstop(\n    self\n)\n```\n\nStops a local kafka broker and zookeeper instance synchronously\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/testing/LocalRedpandaBroker.md",
    "content": "## fastkafka.testing.LocalRedpandaBroker {#fastkafka.testing.LocalRedpandaBroker}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L84-L200\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n\nLocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.\n\n### __init__ {#fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.init}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L88-L120\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n__init__(\n    self,\n    topics=[],\n    retries=3,\n    apply_nest_asyncio=False,\n    listener_port=9092,\n    tag='v23.1.2',\n    seastar_core=1,\n    memory='1G',\n    mode='dev-container',\n    default_log_level='debug',\n    kwargs,\n)\n```\n\nInitialises the LocalRedpandaBroker object\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `topics` | `Iterable[str]` | List of topics to create after sucessfull redpanda broker startup | `[]` |\n| `retries` | `int` | Number of retries to create redpanda service | `3` |\n| `apply_nest_asyncio` | `bool` | set to True if running in notebook | `False` |\n| `listener_port` | `int` | Port on which the clients (producers and consumers) can connect | `9092` |\n| `tag` | `str` | Tag of Redpanda image to use to start container | `'v23.1.2'` |\n| `seastar_core` | `int` | Core(s) to use byt Seastar (the framework Redpanda uses under the hood) | `1` |\n| `memory` | `str` | The amount of memory to make available to Redpanda | `'1G'` |\n| `mode` | `str` | Mode to use to load configuration properties in container | `'dev-container'` |\n| `default_log_level` | `str` | Log levels to use for Redpanda | `'debug'` |\n\n### get_service_config_string {#fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.get_service_config_string}\n\n<a 
href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L168-L174\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nget_service_config_string(\n    self, service, data_dir\n)\n```\n\nGenerates a configuration for a service\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `data_dir` | `Path` | Path to the directory where the zookeepeer instance will save data | *required* |\n| `service` | `str` | \"redpanda\", defines which service to get config string for | *required* |\n\n### is_started {#fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.is_started}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L123-L133\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@property\nis_started(\n    self\n)\n```\n\nProperty indicating whether the LocalRedpandaBroker object is started.\n\nThe is_started property indicates if the LocalRedpandaBroker object is currently\nin a started state. This implies that Redpanda docker container has sucesfully\nstarted and is ready for handling events.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `bool` | True if the object is started, False otherwise. 
|\n\n### start {#fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.start}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L333-L372\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nstart(\n    self\n)\n```\n\nStarts a local redpanda broker instance synchronously\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `str` | Redpanda broker bootstrap server address in string format: add:port |\n\n### stop {#fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.stop}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L376-L388\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nstop(\n    self\n)\n```\n\nStops a local redpanda broker instance synchronously\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/api/fastkafka/testing/Tester.md",
    "content": "### __init__ {#fastkafka._application.tester.Tester.init}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/tester.py#L51-L77\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n__init__(\n    self, app, use_in_memory_broker=True\n)\n```\n\nMirror-like object for testing a FastKafka application\n\nCan be used as context manager\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `app` | `Union[fastkafka.FastKafka, List[fastkafka.FastKafka]]` | The FastKafka application to be tested. | *required* |\n| `use_in_memory_broker` | `bool` | Whether to use an in-memory broker for testing or not. | `True` |\n\n### benchmark {#fastkafka._application.app.FastKafka.benchmark}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1108-L1159\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nbenchmark(\n    self, interval=1, sliding_window_size=None\n)\n```\n\nDecorator to benchmark produces/consumes functions\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `interval` | `Union[int, datetime.timedelta]` | Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second | `1` |\n| `sliding_window_size` | `Optional[int]` | The size of the sliding window to use to calculateaverage throughput. 
default: None - By default average throughput isnot calculated | `None` |\n\n### consumes {#fastkafka._application.app.FastKafka.consumes}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L474-L557\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nconsumes(\n    self,\n    topic=None,\n    decoder='json',\n    executor=None,\n    brokers=None,\n    prefix='on_',\n    description=None,\n    loop=None,\n    bootstrap_servers='localhost',\n    client_id='aiokafka-0.8.1',\n    group_id=None,\n    key_deserializer=None,\n    value_deserializer=None,\n    fetch_max_wait_ms=500,\n    fetch_max_bytes=52428800,\n    fetch_min_bytes=1,\n    max_partition_fetch_bytes=1048576,\n    request_timeout_ms=40000,\n    retry_backoff_ms=100,\n    auto_offset_reset='latest',\n    enable_auto_commit=True,\n    auto_commit_interval_ms=5000,\n    check_crcs=True,\n    metadata_max_age_ms=300000,\n    partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n    max_poll_interval_ms=300000,\n    rebalance_timeout_ms=None,\n    session_timeout_ms=10000,\n    heartbeat_interval_ms=3000,\n    consumer_timeout_ms=200,\n    max_poll_records=None,\n    ssl_context=None,\n    security_protocol='PLAINTEXT',\n    api_version='auto',\n    exclude_internal_topics=True,\n    connections_max_idle_ms=540000,\n    isolation_level='read_uncommitted',\n    sasl_mechanism='PLAIN',\n    sasl_plain_password=None,\n    sasl_plain_username=None,\n    sasl_kerberos_service_name='kafka',\n    sasl_kerberos_domain_name=None,\n    sasl_oauth_token_provider=None,\n)\n```\n\nDecorator registering the callback called when a message is received in a topic.\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `topic` | `Optional[str]` | Kafka topic that the 
consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefix | `None` |\n| `decoder` | `Union[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]` | Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. It also accepts custom decoder function. | `'json'` |\n| `executor` | `Union[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]` | Type of executor to choose for consuming tasks. Avaliable optionsare \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\"SequentialExecutor\" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use\"DynamicTaskExecutor\" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking. | `None` |\n| `prefix` | `str` | Prefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: \"on_\". If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError | `'on_'` |\n| `brokers` | `Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]` | Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka. | `None` |\n| `description` | `Optional[str]` | Optional description of the consuming function async docs.If not provided, consuming function __doc__ attr will be used. 
| `None` |\n| `bootstrap_servers` |  | a ``host[:port]`` string (or list of``host[:port]`` strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. If no servers arespecified, will default to ``localhost:9092``. | `'localhost'` |\n| `client_id` |  | a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:`~.consumer.group_coordinator.GroupCoordinator`for logging with respect to consumer group administration. Default:``aiokafka-{version}`` | `'aiokafka-0.8.1'` |\n| `group_id` |  | name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None | `None` |\n| `key_deserializer` |  | Any callable that takes araw message key and returns a deserialized key. | `None` |\n| `value_deserializer` |  | Any callable that takes araw message value and returns a deserialized value. | `None` |\n| `fetch_min_bytes` |  | Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to`fetch_max_wait_ms` for more data to accumulate. Default: 1. | `1` |\n| `fetch_max_bytes` |  | The maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb). 
| `52428800` |\n| `fetch_max_wait_ms` |  | The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500. | `500` |\n| `max_partition_fetch_bytes` |  | The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ``= #partitions * max_partition_fetch_bytes``.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576. | `1048576` |\n| `max_poll_records` |  | The maximum number of records returned in asingle call to :meth:`.getmany`. Defaults ``None``, no limit. | `None` |\n| `request_timeout_ms` |  | Client request timeout in milliseconds.Default: 40000. | `40000` |\n| `retry_backoff_ms` |  | Milliseconds to backoff when retrying onerrors. Default: 100. | `100` |\n| `auto_offset_reset` |  | A policy for resetting offsets on:exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldestavailable message, ``latest`` will move to the most recent, and``none`` will raise an exception so you can handle this case.Default: ``latest``. | `'latest'` |\n| `enable_auto_commit` |  | If true the consumer's offset will beperiodically committed in the background. Default: True. | `True` |\n| `auto_commit_interval_ms` |  | milliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000. | `5000` |\n| `check_crcs` |  | Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. 
Default: True | `True` |\n| `metadata_max_age_ms` |  | The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000 | `300000` |\n| `partition_assignment_strategy` |  | List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:`.RoundRobinPartitionAssignor`] | `(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)` |\n| `max_poll_interval_ms` |  | Maximum allowed time between calls toconsume messages (e.g., :meth:`.getmany`). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See `KIP-62`_ for moreinformation. Default 300000 | `300000` |\n| `rebalance_timeout_ms` |  | The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to `max.poll.interval.ms` configuration,but as ``aiokafka`` will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:`.ConsumerRebalanceListener` to delay rebalacing. Defaultsto ``session_timeout_ms`` | `None` |\n| `session_timeout_ms` |  | Client group session and failure detectiontimeout. 
The consumer sends periodic heartbeats(`heartbeat.interval.ms`) to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe **broker** configuration properties`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.Default: 10000 | `10000` |\n| `heartbeat_interval_ms` |  | The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than `session_timeout_ms`, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 3000 | `3000` |\n| `consumer_timeout_ms` |  | maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200 | `200` |\n| `api_version` |  | specify which kafka API version to use.:class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.If set to ``auto``, will attempt to infer the broker version byprobing various APIs. Default: ``auto`` | `'auto'` |\n| `security_protocol` |  | Protocol used to communicate with brokers.Valid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,``SASL_SSL``. Default: ``PLAINTEXT``. | `'PLAINTEXT'` |\n| `ssl_context` |  | pre-configured :class:`~ssl.SSLContext`for wrapping socket connections. Directly passed into asyncio's:meth:`~asyncio.loop.create_connection`. For more information see:ref:`ssl_auth`. Default: None. | `None` |\n| `exclude_internal_topics` |  | Whether records from internal topics(such as offsets) should be exposed to the consumer. 
If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True | `True` |\n| `connections_max_idle_ms` |  | Close idle connections after the numberof milliseconds specified by this config. Specifying `None` willdisable idle checks. Default: 540000 (9 minutes). | `540000` |\n| `isolation_level` |  | Controls how to read messages writtentransactionally.If set to ``read_committed``, :meth:`.getmany` will only returntransactional messages which have been committed.If set to ``read_uncommitted`` (the default), :meth:`.getmany` willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, in`read_committed` mode, :meth:`.getmany` will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, `read_committed` consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in `read_committed` the seek_to_end method willreturn the LSO. See method docs below. Default: ``read_uncommitted`` | `'read_uncommitted'` |\n| `sasl_mechanism` |  | Authentication mechanism when security_protocolis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,``OAUTHBEARER``.Default: ``PLAIN`` | `'PLAIN'` |\n| `sasl_plain_username` |  | username for SASL ``PLAIN`` authentication.Default: None | `None` |\n| `sasl_plain_password` |  | password for SASL ``PLAIN`` authentication.Default: None | `None` |\n| `sasl_oauth_token_provider` |  | OAuthBearer token provider instance. 
(See :mod:`kafka.oauth.abstract`).Default: None | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]]` | : A function returning the same function |\n\n### create_docs {#fastkafka._application.app.FastKafka.create_docs}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L938-L964\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\ncreate_docs(\n    self\n)\n```\n\nCreate the asyncapi documentation based on the configured consumers and producers.\n\nThis function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. 
It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers.\n\nNote:\n    The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\n    attribute of the FastKafka instance.\n\n### create_mocks {#fastkafka._application.app.FastKafka.create_mocks}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1026-L1104\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\ncreate_mocks(\n    self\n)\n```\n\nCreates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\n\n### fastapi_lifespan {#fastkafka._application.app.FastKafka.fastapi_lifespan}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1163-L1182\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nfastapi_lifespan(\n    self, kafka_broker_name\n)\n```\n\nMethod for managing the lifespan of a FastAPI application with a specific Kafka broker.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `kafka_broker_name` | `str` | The name of the Kafka broker to start FastKafka | *required* |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]` | Lifespan function to use for initializing FastAPI |\n\n### get_topics {#fastkafka._application.app.FastKafka.get_topics}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L663-L672\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nget_topics(\n    self\n)\n```\n\nGet all topics for both producing and consuming.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Iterable[str]` | A set of topics for both producing and consuming. 
|\n\n### is_started {#fastkafka._application.app.FastKafka.is_started}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L308-L319\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\n@property\nis_started(\n    self\n)\n```\n\nProperty indicating whether the FastKafka object is started.\n\nThe is_started property indicates if the FastKafka object is currently\nin a started state. This implies that all background tasks, producers,\nand consumers have been initiated, and the object is successfully connected\nto the Kafka broker.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `bool` | True if the object is started, False otherwise. |\n\n### produces {#fastkafka._application.app.FastKafka.produces}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L582-L659\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nproduces(\n    self,\n    topic=None,\n    encoder='json',\n    prefix='to_',\n    brokers=None,\n    description=None,\n    loop=None,\n    bootstrap_servers='localhost',\n    client_id=None,\n    metadata_max_age_ms=300000,\n    request_timeout_ms=40000,\n    api_version='auto',\n    acks=<object object at 0x7ff10d5f9100>,\n    key_serializer=None,\n    value_serializer=None,\n    compression_type=None,\n    max_batch_size=16384,\n    partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,\n    max_request_size=1048576,\n    linger_ms=0,\n    send_backoff_ms=100,\n    retry_backoff_ms=100,\n    security_protocol='PLAINTEXT',\n    ssl_context=None,\n    connections_max_idle_ms=540000,\n    enable_idempotence=False,\n    transactional_id=None,\n    transaction_timeout_ms=60000,\n    sasl_mechanism='PLAIN',\n    sasl_plain_password=None,\n    sasl_plain_username=None,\n    sasl_kerberos_service_name='kafka',\n    sasl_kerberos_domain_name=None,\n    sasl_oauth_token_provider=None,\n)\n```\n\nDecorator 
registering the callback called when delivery report for a produced message is received\n\nThis function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `topic` | `Optional[str]` | Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix. | `None` |\n| `encoder` | `Union[str, Callable[[pydantic.main.BaseModel], bytes]]` | Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. It also accepts custom encoder function. | `'json'` |\n| `prefix` | `str` | Prefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: \"to_\". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError | `'to_'` |\n| `brokers` | `Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]` | Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka. | `None` |\n| `description` | `Optional[str]` | Optional description of the producing function async docs.If not provided, producing function __doc__ attr will be used. | `None` |\n| `bootstrap_servers` |  | a ``host[:port]`` string or list of``host[:port]`` strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list.  It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to ``localhost:9092``. 
| `'localhost'` |\n| `client_id` |  | a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ``aiokafka-producer-#`` (appended with a unique numberper instance) | `None` |\n| `key_serializer` |  | used to convert user-supplied keys to bytesIf not :data:`None`, called as ``f(key),`` should return:class:`bytes`.Default: :data:`None`. | `None` |\n| `value_serializer` |  | used to convert user-supplied messagevalues to :class:`bytes`. If not :data:`None`, called as``f(value)``, should return :class:`bytes`.Default: :data:`None`. | `None` |\n| `acks` |  | one of ``0``, ``1``, ``all``. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:* ``0``: Producer will not wait for any acknowledgment from the server  at all. The message will immediately be added to the socket  buffer and considered sent. No guarantee can be made that the  server has received the record in this case, and the retries  configuration will not take effect (as the client won't  generally know of any failures). The offset given back for each  record will always be set to -1.* ``1``: The broker leader will write the record to its local log but  will respond without awaiting full acknowledgement from all  followers. In this case should the leader fail immediately  after acknowledging the record but before the followers have  replicated it then the record will be lost.* ``all``: The broker leader will wait for the full set of in-sync  replicas to acknowledge the record. This guarantees that the  record will not be lost as long as at least one in-sync replica  remains alive. This is the strongest available guarantee.If unset, defaults to ``acks=1``. 
If `enable_idempotence` is:data:`True` defaults to ``acks=all`` | `<object object at 0x7ff10d5f9100>` |\n| `compression_type` |  | The compression type for all data generated bythe producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``or :data:`None`.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:`None`. | `None` |\n| `max_batch_size` |  | Maximum size of buffered data per partition.After this amount :meth:`send` coroutine will block until batch isdrained.Default: 16384 | `16384` |\n| `linger_ms` |  | The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than `linger_ms`, producer will wait ``linger_ms - process_time``.Default: 0 (i.e. no delay). | `0` |\n| `partitioner` |  | Callable used to determine which partitioneach message is assigned to. Called (after key serialization):``partitioner(key_bytes, all_partitions, available_partitions)``.The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:`None`, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible). | `<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>` |\n| `max_request_size` |  | The maximum size of a request. This is alsoeffectively a cap on the maximum record size. 
Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576. | `1048576` |\n| `metadata_max_age_ms` |  | The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000 | `300000` |\n| `request_timeout_ms` |  | Produce request timeout in milliseconds.As it's sent as part of:class:`~kafka.protocol.produce.ProduceRequest` (it's a blockingcall), maximum waiting time can be up to ``2 *request_timeout_ms``.Default: 40000. | `40000` |\n| `retry_backoff_ms` |  | Milliseconds to backoff when retrying onerrors. Default: 100. | `100` |\n| `api_version` |  | specify which kafka API version to use.If set to ``auto``, will attempt to infer the broker version byprobing various APIs. Default: ``auto`` | `'auto'` |\n| `security_protocol` |  | Protocol used to communicate with brokers.Valid values are: ``PLAINTEXT``, ``SSL``, ``SASL_PLAINTEXT``,``SASL_SSL``. Default: ``PLAINTEXT``. | `'PLAINTEXT'` |\n| `ssl_context` |  | pre-configured :class:`~ssl.SSLContext`for wrapping socket connections. Directly passed into asyncio's:meth:`~asyncio.loop.create_connection`. For moreinformation see :ref:`ssl_auth`.Default: :data:`None` | `None` |\n| `connections_max_idle_ms` |  | Close idle connections after the numberof milliseconds specified by this config. Specifying :data:`None` willdisable idle checks. Default: 540000 (9 minutes). | `540000` |\n| `enable_idempotence` |  | When set to :data:`True`, the producer willensure that exactly one copy of each message is written in thestream. If :data:`False`, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ``all``. 
If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:`ValueError` will be thrown.New in version 0.5.0. | `False` |\n| `sasl_mechanism` |  | Authentication mechanism when security_protocolis configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid valuesare: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,``OAUTHBEARER``.Default: ``PLAIN`` | `'PLAIN'` |\n| `sasl_plain_username` |  | username for SASL ``PLAIN`` authentication.Default: :data:`None` | `None` |\n| `sasl_plain_password` |  | password for SASL ``PLAIN`` authentication.Default: :data:`None` | `None` |\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]` | : A function returning the same function |\n\n**Exceptions**:\n\n|  Type | Description |\n|---|---|\n| `ValueError` | when needed |\n\n### run_in_background {#fastkafka._application.app.FastKafka.run_in_background}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L676-L709\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nrun_in_background(\n    self\n)\n```\n\nDecorator to schedule a task to be run in the background.\n\nThis decorator is used to schedule a task to be run in the background 
when the app's `_on_startup` event is triggered.\n\n**Returns**:\n\n|  Type | Description |\n|---|---|\n| `Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]` | A decorator function that takes a background task as an input and stores it to be run in the backround. |\n\n### set_kafka_broker {#fastkafka._application.app.FastKafka.set_kafka_broker}\n\n<a href=\"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L321-L337\" class=\"link-to-source\" target=\"_blank\">View source</a>\n\n```py\nset_kafka_broker(\n    self, kafka_broker_name\n)\n```\n\nSets the Kafka broker to start FastKafka with\n\n**Parameters**:\n\n|  Name | Type | Description | Default |\n|---|---|---|---|\n| `kafka_broker_name` | `str` | The name of the Kafka broker to start FastKafka | *required* |\n\n**Exceptions**:\n\n|  Type | Description |\n|---|---|\n| `ValueError` | If the provided kafka_broker_name is not found in dictionary of kafka_brokers |\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/cli/fastkafka.md",
    "content": "# `fastkafka`\n\n**Usage**:\n\n```console\n$ fastkafka [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `docs`: Commands for managing FastKafka app...\n* `run`: Runs Fast Kafka API application\n* `testing`: Commands for managing FastKafka testing\n\n## `fastkafka docs`\n\nCommands for managing FastKafka app documentation\n\n**Usage**:\n\n```console\n$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `generate`: Generates documentation for a FastKafka...\n* `install_deps`: Installs dependencies for FastKafka...\n* `serve`: Generates and serves documentation for a...\n\n### `fastkafka docs generate`\n\nGenerates documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs generate [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created; default is current directory\n* `--help`: Show this message and exit.\n\n### `fastkafka docs install_deps`\n\nInstalls dependencies for FastKafka documentation generation\n\n**Usage**:\n\n```console\n$ fastkafka docs install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n### `fastkafka docs serve`\n\nGenerates and serves documentation for a FastKafka application\n\n**Usage**:\n\n```console\n$ fastkafka docs serve [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  
[required]\n\n**Options**:\n\n* `--root-path TEXT`: root path under which documentation will be created; default is current directory\n* `--bind TEXT`: Some info  [default: 127.0.0.1]\n* `--port INTEGER`: Some info  [default: 8000]\n* `--help`: Show this message and exit.\n\n## `fastkafka run`\n\nRuns Fast Kafka API application\n\n**Usage**:\n\n```console\n$ fastkafka run [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--num-workers INTEGER`: Number of FastKafka instances to run, defaults to number of CPU cores.  [default: 64]\n* `--kafka-broker TEXT`: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class.  [default: localhost]\n* `--help`: Show this message and exit.\n\n## `fastkafka testing`\n\nCommands for managing FastKafka testing\n\n**Usage**:\n\n```console\n$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n**Commands**:\n\n* `install_deps`: Installs dependencies for FastKafka app...\n\n### `fastkafka testing install_deps`\n\nInstalls dependencies for FastKafka app testing\n\n**Usage**:\n\n```console\n$ fastkafka testing install_deps [OPTIONS]\n```\n\n**Options**:\n\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/cli/run_fastkafka_server_process.md",
    "content": "# `run_fastkafka_server_process`\n\n**Usage**:\n\n```console\n$ run_fastkafka_server_process [OPTIONS] APP\n```\n\n**Arguments**:\n\n* `APP`: Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.  [required]\n\n**Options**:\n\n* `--kafka-broker TEXT`: Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.  [required]\n* `--install-completion`: Install completion for the current shell.\n* `--show-completion`: Show completion for the current shell, to copy it or customize the installation.\n* `--help`: Show this message and exit.\n\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_00_FastKafka_Demo.md",
    "content": "# FastKafka tutorial\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## Install\n\nFastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\n``` python\ntry:\n    import fastkafka\nexcept:\n    ! pip install fastkafka\n```\n\n## Running in Colab\n\nYou can start this interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n</a>\n\n## Writing server code\n\nHere is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic.\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. 
The following call downloads\nthe dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/usage/types/#constrained-types),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. 
It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server.\n\nNext, an object of the\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing the Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible string values predicted by the model. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\ninstances which internally starts Kafka broker and zookeeper.\n\nBefore running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:\n\n``` sh\nfastkafka testing install_deps\n```\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send IrisInputData message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with IrisPrediction in predictions topic\n    await tester.awaited_mocks.on_predictions.assert_awaited_with(\n        IrisPrediction(species=\"setosa\"), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created an Iris classification model and encapsulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent IrisInputData message to `input_data` topic\n\n4.  
Asserted and checked that the developed iris classification service\n    has reacted to IrisInputData message\n\n## Running the service\n\nThe service can be started using builtin `faskafka run` CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": 
{\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\ndef to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nTo run the service, you will need a running Kafka broker on localhost as\nspecified in the `kafka_brokers` parameter above. We can start the Kafka\nbroker locally using the\n[`ApacheKafkaBroker`](../api/fastkafka/testing/ApacheKafkaBroker.md#fastkafka.testing.ApacheKafkaBroker).\nNotice that the same happens automatically in the\n[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\nas shown above.\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n    [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: 
Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n    '127.0.0.1:9092'\n\nThen, we start the FastKafka service by running the following command in\nthe folder where the `application.py` file is located:\n\n``` sh\nfastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n```\n\nIn the above command, we use `--num-workers` option to specify how many\nworkers to launch and we use `--kafka-broker` option to specify which\nkafka broker configuration to use from earlier specified `kafka_brokers`\n\n    [1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n    [1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n    [1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n    [1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n    ^C\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n\nYou need to interrupt running of the cell above by selecting\n`Runtime->Interrupt execution` on the toolbar above.\n\nFinally, we can stop the local Kafka Broker:\n\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n  
  [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n    [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nWhen running in Colab, we need to update Node.js first:\n\nWe need to install all dependencies for the generator using the\nfollowing command line:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programmatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\nThis will generate the *asyncapi* folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\n    drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n    [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    ^C\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation bellow:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    bootstrap_servers=\"localhost:9092\",\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet are for the consumer as specified in\nthe code 
above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippet are for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages as defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka\\_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_01_Intro.md",
    "content": "# Intro\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nThis tutorial will show you how to use <b>FastKafkaAPI</b>, step by\nstep.\n\nThe goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel.\n\nIn this Intro tutorial we’ll go trough the basic requirements to run the\ndemos presented in future steps.\n\n## Installing FastKafkaAPI\n\nFirst step is to install FastKafkaAPI\n\n``` shell\n$ pip install fastkafka\n```\n\n## Preparing a Kafka broker\n\nNext step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication.\n\n!!! info \"Hey, your first info!\"\n\n    If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n\nTo go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times).\n\n!!! warning \"Listen! This is important.\"\n\n    To be able to setup this configuration you need to have Docker and docker-compose installed\n\n    See here for more info on <a href = \\\"https://docs.docker.com/\\\" target=\\\"_blank\\\">Docker</a> and <a href = \\\"https://docs.docker.com/compose/install/\\\" target=\\\"_blank\\\">docker compose</a>\n\nTo setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g. fastkafka_demo). 
Inside the new\nfolder create a new YAML file named <b>kafka_demo.yml</b> and copy the\nfollowing configuration into it:\n\n``` yaml\nversion: \"3\"\nservices:\n    zookeeper:\n        image: wurstmeister/zookeeper\n        hostname: zookeeper\n        container_name: zookeeper\n        networks:\n          - fastkafka-network\n        ports:\n          - \"2181:2181\"\n          - \"22:22\"\n          - \"2888:2888\"\n          - \"3888:3888\"\n    kafka:\n        image: wurstmeister/kafka\n        container_name: kafka\n        ports:\n          - \"9093:9093\"\n        environment:\n            HOSTNAME_COMMAND: \"docker info | grep ^Name: | cut -d' ' -f 2\"\n            KAFKA_ZOOKEEPER_CONNECT: \"zookeeper:2181\"\n            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n            KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n            KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n            KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n            KAFKA_CREATE_TOPICS: \"hello:1:1\"\n        volumes:\n            - /var/run/docker.sock:/var/run/docker.sock\n        depends_on:\n            - zookeeper\n        healthcheck:\n            test: [ \"CMD\", \"kafka-topics.sh\", \"--list\", \"--zookeeper\", \"zookeeper:2181\" ]\n            interval: 5s\n            timeout: 10s\n            retries: 5\n        networks:\n          - fastkafka-network\nnetworks:\n    fastkafka-network:\n        name: \"fastkafka-network\"\n```\n\nThis configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a ‘hello’ topic (quite enough for a\nstart). To start the configuration, run:\n\n``` shell\n$ docker-compose -f kafka_demo.yaml up -d --wait\n```\n\nThis will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
:confetti_ball:\n\n## Running the code\n\nAfter installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the ‘First Steps’ part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem.\n\nYou are highly encouraged to follow along the tutorials not just by\nreading through them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_02_First_Steps.md",
    "content": "# First Steps\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Creating a simple Kafka consumer app\n\nFor our first demo we will create the simplest possible Kafka consumer\nand run it using ‘fastkafka run’ command.\n\nThe consumer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n\n2.  Listen to the hello topic\n\n3.  Write any message received from the hello topic to stdout\n\nTo create the consumer, first, create a file named\n<b>hello_kafka_consumer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n    \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n    print(f\"Got data, msg={msg.msg}\", flush=True)\n```\n\n!!! info \"Kafka configuration\"\n\n    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\n!!! warning \"Remember to flush\"\n\n    Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n\nTo run this consumer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n    [878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n\nNow you can interact with your consumer, by sending the messages to the\nsubscribed ‘hello’ topic, don’t worry, we will cover this in the next\nstep of this guide.\n\n## Sending first message to your consumer\n\nAfter we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly.\n\nIf you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic.\n\nFirst, connect to your running kafka broker by running:\n\n``` shell\ndocker run -it kafka /bin/bash\n```\n\nThen, when connected to the container, run:\n\n``` shell\nkafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n```\n\nThis will open an interactive connection to the hello topic, now you can\nwrite your mesages to the topic and they will be consumed by our\nconsumer.\n\nIn the shell, type:\n\n``` shell\n{\"msg\":\"hello\"}\n```\n\nand press enter. 
This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout.\n\nCheck the output of your consumer (terminal where you ran the ‘fastkafka\nrun’ command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:\n\n``` shell\nGot data, msg=hello\n```\n\n## Creating a hello Kafka producer\n\nConsuming messages is only a part of this Library functionality, the\nother big part is producing the messages. So, let’s create our first\nkafka producer which will send it’s greetings to our consumer\nperiodically.\n\nThe producer will:\n\n1.  Connect to the Kafka Broker we setup in the Intro guide\n2.  Connect to the hello topic\n3.  Periodically send a message to the hello world topic\n\nTo create the producer, first, create a file named\n<b>hello_kafka_producer.py</b> and copy the following code to it:\n\n``` python\n\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ[\"KAFKA_HOSTNAME\"]\nkafka_server_port = environ[\"KAFKA_PORT\"]\n\nkafka_brokers = {\n    \"localhost\": {\n        \"description\": \"local development kafka\",\n        \"url\": kafka_server_url,\n        \"port\": kafka_server_port\n    }\n}\n\nclass HelloKafkaMsg(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_app = FastKafka(\n    kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n    logger.info(f\"Producing: {msg}\")\n    return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello(HelloKafkaMsg(msg=\"hello\"))\n        await asyncio.sleep(1)\n```\n\n!!! 
info \"Kafka configuration\"\n\n    This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n\nTo run this producer, in your terminal, run:\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n```\n\nAfter running the command, you should see something similar to the ouput\nbelow:\n\n    [INFO] fastkafka._components.test_dependencies: Java is already installed.\n    [INFO] fastkafka._components.test_dependencies: Kafka is installed.\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n    [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n    [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n    [879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n    [879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n    [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n\nNow, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. If your consumer is still running, you\nshould see the messages appear in its log.\n\n## Recap\n\nIn this guide we have:\n\n1.  Created a simple Kafka consumer using FastKafka\n2.  Sent a message to our consumer trough Kafka\n3.  Created a simple Kafka producer using FastKafka\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_03_Authentication.md",
    "content": "# Authentication\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## TLS Authentication\n\nsasl_mechanism (str) – Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN\n\nsasl_plain_username (str) – username for SASL PLAIN authentication.\nDefault: None\n\nsasl_plain_password (str) – password for SASL PLAIN authentication.\nDefault: None\n\nsasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_04_Github_Actions_Workflow.md",
    "content": "# Deploy FastKafka docs to GitHub Pages\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Getting started\n\nAdd your workflow file `.github/workflows/fastkafka_docs_deploy.yml` and\npush it to your remote default branch.\n\nHere is an example workflow:\n\n``` yaml\nname: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n  push:\n    branches: [ \"main\", \"master\" ]\n  workflow_dispatch:\n\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n      - uses: airtai/workflows/fastkafka-ghp@main\n        with:\n          app: \"test_fastkafka.application:kafka_app\"\n```\n\n## Options\n\n### Set app location\n\nInput in the form of `path:app`, where `path` is the path to a Python\nfile and `app` is an object of type\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka):\n\n``` yaml\n- name: Deploy\n  uses: airtai/workflows/fastkafka-ghp@main\n  with:\n    app: \"test_fastkafka.application:kafka_app\"\n```\n\nIn the above example,\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napp is named as `kafka_app` and it is available in the `application`\nsubmodule of the `test_fastkafka` module.\n\n## Example Repository\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)-based\nlibrary that uses the above-mentioned workfow actions to publish\nFastKafka docs to `Github Pages` can be found\n[here](https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_05_Lifespan_Handler.md",
    "content": "# Lifespan Events\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nDid you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! :rocket:\n\nLets break it down:\n\nYou can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages.\n\nSimilarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up.\n\nBy executing code before consuming and after producing, you cover the\nentire lifecycle of your application :tada:\n\nThis is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!\n\nSo lets give it a try and see how it can make your Kafka app even more\nawesome! :muscle:\n\n## Lifespan example - Iris prediction model\n\nLet’s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don’t want to load them for every\nmessage.\n\nHere’s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we’ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication.\n\n### Lifespan\n\nYou can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager.\n\nLet’s start with an example and then see it in detail.\n\nWe create an async function lifespan() with yield like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \n```\n\nThe first thing to notice, is that we are defining an async function\nwith `yield`. This is very similar to Dependencies with `yield`.\n\nThe first part of the function, before the `yield`, will be executed\n**before** the application starts. 
And the part after the `yield` will\nbe executed **after** the application has finished.\n\nThis lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown.\n\nThe lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup.\n\nFor demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called.\n\n### Async context manager\n\nContext managers can be used in `with` blocks, our lifespan, for example\ncould be used like this:\n\n``` python\nml_models = {}\nasync with lifespan(None):\n    print(ml_models)\n```\n\nWhen you create a context manager or an async context manager, what it\ndoes is that, before entering the `with` block, it will execute the code\nbefore the `yield`, and after exiting the `with` block, it will execute\nthe code after the `yield`.\n\nIf you want to learn more about context managers and contextlib\ndecorators, please visit [Python official\ndocs](https://docs.python.org/3/library/contextlib.html)\n\n## App demo\n\n### FastKafka app\n\nLets now create our application using the created lifespan handler.\n\nNotice how we passed our lifespan handler to the app when constructing\nit trough the `lifespan` argument.\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n```\n\n### Data modeling\n\nLets model the Iris data for our app:\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, 
description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Consumers and producers\n\nLets create a consumer and producer for our app that will generate\npredictions from input iris data.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Final app\n\nThe final app looks like this:\n\n``` python\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, 
description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    print(\"Loading the model!\")\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n    yield\n    # Clean up the ML models and release the resources\n    \n    print(\"Exiting, clearing model dict!\")\n    ml_models.clear()\n    \nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local development kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Running the app\n\nNow we can run the app with your custom lifespan handler. Copy the code\nabove in lifespan_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n```\n\nWhen you run the app, you should see a simmilar output to the one below:\n\n## Recap\n\nIn this guide we have defined a lifespan handler and passed to our\nFastKafka app.\n\nSome important points are:\n\n1.  
Lifespan handler is implemented as\n    [AsyncContextManager](https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager)\n2.  Code **before** yield in lifespan will be executed **before**\n    application **startup**\n3.  Code **after** yield in lifespan will be executed **after**\n    application **shutdown**\n4.  You can pass your lifespan handler to FastKafka app on\n    initialisation by passing a `lifespan` argument\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_06_Benchmarking_FastKafka.md",
    "content": "# Benchmarking FastKafka app\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\nTo benchmark a\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nproject, you will need the following:\n\n1.  A library built with\n    [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka).\n2.  A running `Kafka` instance to benchmark the FastKafka application\n    against.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": 
\"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nhas a decorator for benchmarking which is appropriately called as\n`benchmark`. 
Let’s edit our `application.py` file and add the\n`benchmark` decorator to the consumes method.\n\n``` python\n# content of the \"application.py\" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", 
auto_offset_reset=\"latest\")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nHere we are conducting a benchmark of a function that consumes data from\nthe `input_data` topic with an interval of 1 second and a sliding window\nsize of 5.\n\nThis `benchmark` method uses the `interval` parameter to calculate the\nresults over a specific time period, and the `sliding_window_size`\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation.\n\nThis benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement.\n\n### Starting Kafka\n\nIf you already have a `Kafka` running somewhere, then you can skip this\nstep.\n\nPlease keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself.\n\n#### Installing Java and Kafka\n\nWe need a working `Kafka` instance to benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napp, and to run `Kafka` we need `Java`. 
Thankfully,\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\ncomes with a CLI to install both `Java` and `Kafka` on our machine.\n\nSo, let’s install `Java` and `Kafka` by executing the following command.\n\n``` cmd\nfastkafka testing install_deps\n```\n\nThe above command will extract `Kafka` scripts at the location\n“\\$HOME/.local/kafka_2.13-3.3.2\" on your machine.\n\n#### Creating configuration for Zookeeper and Kafka\n\nNow we need to start `Zookeeper` and `Kafka` separately, and to start\nthem we need `zookeeper.properties` and `kafka.properties` files.\n\nLet’s create a folder inside the folder where `Kafka` scripts were\nextracted and change directory into it.\n\n``` cmd\nmkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n```\n\nLet’s create a file called `zookeeper.properties` and write the\nfollowing content to the file:\n\n``` txt\ndataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n```\n\nSimilarly, let’s create a file called `kafka.properties` and write the\nfollowing content to the file:\n\n``` txt\nbroker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n```\n\n#### Starting Zookeeper and Kafka\n\nWe need two different terminals to run `Zookeeper` in one and `Kafka` in\nanother. 
Let’s open a new terminal and run the following commands to\nstart `Zookeeper`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n```\n\nOnce `Zookeeper` is up and running, open a new terminal and execute the\nfollowing commands to start `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n```\n\nNow we have both `Zookeeper` and `Kafka` up and running.\n\n#### Creating topics in Kafka\n\nIn a new terminal, please execute the following command to create\nnecessary topics in `Kafka`:\n\n``` cmd\nexport PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n```\n\n#### Populating topics with dummy data\n\nTo benchmark our\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napp, we need some data in `Kafka` topics.\n\nIn the same terminal, let’s create some dummy data:\n\n``` cmd\nyes '{\"sepal_length\": 0.7739560486, \"sepal_width\": 0.8636615789, \"petal_length\": 0.6122663046, \"petal_width\": 0.1338914722}' | head -n 1000000 > /tmp/test_data\n```\n\nThis command will create a file called `test_data` in the `tmp` folder\nwith one million rows of text. This will act as dummy data to populate\nthe `input_data` topic.\n\nLet’s populate the created topic `input_data` with the dummy data which\nwe created above:\n\n``` cmd\n./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n```\n\nNow our topic `input_data` has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again.\n\n### Benchmarking FastKafka\n\nOnce `Zookeeper` and `Kafka` are ready, benchmarking\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napp is as simple as running the `fastkafka run` command:\n\n``` cmd\nfastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n```\n\nThis command will start the\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napp and begin consuming messages from `Kafka`, which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the `interval` and `sliding_window_size` values.\n\nThe output for the `fastkafka run` command is:\n\n``` txt\n[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 
23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n```\n\nBased on the output, when using 1 worker, our\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napp achieved a `throughput` of 93k messages per second and an\n`average throughput` of 93k messages per second.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",
    "content": "# Encoding and Decoding Kafka Messages with FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Prerequisites\n\n1.  A basic knowledge of\n    [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\n    is needed to proceed with this guide. If you are not familiar with\n    [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka),\n    please go through the [tutorial](/docs#tutorial) first.\n2.  [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\n    with its dependencies installed is needed. Please install\n    [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\n    using the command - `pip install fastkafka`\n\n## Ways to Encode and Decode Messages with FastKafka\n\nIn python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. Similarly, while consuming messages, we consume them as bytes and\nthen convert them to strings.\n\nIn FastKafka, we specify message schema using Pydantic models as\nmentioned in [tutorial](/docs#messages):\n\n``` python\n# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nThen, we send and receive messages as instances of Pydantic models which\nwe defined. 
So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics.\n\nThe `@consumes` and `@produces` methods of FastKafka accept a parameter\ncalled `decoder`/`encoder` to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:\n\n1.  json - This is the default encoder/decoder option in FastKafka.\n    While producing, this option converts our instance of Pydantic model\n    messages to a JSON string and then converts it to bytes before\n    sending it to the topic. While consuming, it converts bytes to a\n    JSON string and then constructs an instance of Pydantic model from\n    the JSON string.\n2.  avro - This option uses Avro encoding/decoding to convert instances\n    of Pydantic model messages to bytes while producing, and while\n    consuming, it constructs an instance of Pydantic model from bytes.\n3.  custom encoder/decoder - If you are not happy with the json or avro\n    encoder/decoder options, you can write your own encoder/decoder\n    functions and use them to encode/decode Pydantic messages.\n\n## 1. Json encoder and decoder\n\nThe default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string.\n\nWe can use the application from [tutorial](/docs#running-the-service) as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder\nparameter:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"json\")\nasync def 
on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"json\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above code, the `@kafka_app.consumes` decorator sets up a\nconsumer for the “input_data\" topic, using the ‘json’ decoder to convert\nthe message payload to an instance of `IrisInputData`. The\n`@kafka_app.produces` decorator sets up a producer for the “predictions\"\ntopic, using the ‘json’ encoder to convert the instance of\n`IrisPrediction` to message payload.\n\n## 2. Avro encoder and decoder\n\n### What is Avro?\n\nAvro is a row-oriented remote procedure call and data serialization\nframework developed within Apache’s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n[docs](https://avro.apache.org/docs/).\n\n### Installing FastKafka with Avro dependencies\n\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nwith Avro support using the command - `pip install fastkafka[avro]`\n\n### Defining Avro Schema Using Pydantic Models\n\nBy default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models.\n\nSo, similar to the [tutorial](/docs#tutorial), the message schema will\nremain as it is.\n\n``` python\n# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\nNo need to change anything to support avro. 
You can use existing\nPydantic models as is.\n\n### Reusing existing avro schema\n\nIf you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined.\n\n#### Building pydantic models from avro schema dictionary\n\nLet’s modify the above example and let’s assume we have schemas already\nfor `IrisInputData` and `IrisPrediction` which will look like below:\n\n``` python\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": \"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n```\n\nWe can easily construct pydantic models from avro schema using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md#fastkafka.encoder.avsc_to_pydantic)\nfunction which is included as part of\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nitself.\n\n``` python\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.model_fields)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.model_fields)\n```\n\nThe above code will convert avro schema to pydantic models and will\nprint pydantic models’ fields. 
The output of the above is:\n\n``` txt\n{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n```\n\nThis is exactly same as manually defining the pydantic models ourselves.\nYou don’t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md#fastkafka.encoder.avsc_to_pydantic)\nfunction as demonstrated above.\n\n#### Building pydantic models from `.avsc` file\n\nNot all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary `.avsc` files in\nfilesystem. Let’s see how to convert those `.avsc` files to pydantic\nmodels.\n\nLet’s assume our avro files are stored in files called\n`iris_input_data_schema.avsc` and `iris_prediction_schema.avsc`. In that\ncase, following code converts the schema to pydantic models:\n\n``` python\nimport json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open(\"iris_input_data_schema.avsc\", \"rb\") as f:\n    iris_input_data_schema = json.load(f)\n    \nwith open(\"iris_prediction_schema.avsc\", \"rb\") as f:\n    iris_prediction_schema = json.load(f)\n    \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.model_fields)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.model_fields)\n```\n\n### Consume/Produce avro messages with FastKafka\n\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nprovides `@consumes` and `@produces` methods to consume/produces\nmessages to/from a `Kafka` topic. 
This is explained in\n[tutorial](/docs#function-decorators).\n\nThe `@consumes` and `@produces` methods accept a parameter called\n`decoder`/`encoder` to decode/encode avro messages.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nIn the above example, in `@consumes` and `@produces` methods, we\nexplicitly instruct FastKafka to `decode` and `encode` messages using\nthe `avro` `decoder`/`encoder` instead of the default `json`\n`decoder`/`encoder`.\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use `avro` to `decode` and\n`encode` messages:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\niris_input_data_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisInputData\",\n    \"name\": \"IrisInputData\",\n    \"fields\": [\n        {\"doc\": \"Sepal length in cm\", \"type\": \"double\", \"name\": 
\"sepal_length\"},\n        {\"doc\": \"Sepal width in cm\", \"type\": \"double\", \"name\": \"sepal_width\"},\n        {\"doc\": \"Petal length in cm\", \"type\": \"double\", \"name\": \"petal_length\"},\n        {\"doc\": \"Petal width in cm\", \"type\": \"double\", \"name\": \"petal_width\"},\n    ],\n}\niris_prediction_schema = {\n    \"type\": \"record\",\n    \"namespace\": \"IrisPrediction\",\n    \"name\": \"IrisPrediction\",\n    \"fields\": [{\"doc\": \"Predicted species\", \"type\": \"string\", \"name\": \"species\"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", decoder=\"avro\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=\"avro\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThe above code is a sample implementation of using FastKafka to consume\nand produce 
Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n[`avsc_to_pydantic`](../api/fastkafka/encoder/avsc_to_pydantic.md#fastkafka.encoder.avsc_to_pydantic)\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages.\n\nThe\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nclass is then instantiated with the broker details, and two functions\ndecorated with `@kafka_app.consumes` and `@kafka_app.produces` are\ndefined to consume messages from the “input_data\" topic and produce\nmessages to the “predictions\" topic, respectively. The functions uses\nthe decoder=“avro\" and encoder=“avro\" parameters to decode and encode\nthe Avro messages.\n\nIn summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline.\n\n## 3. Custom encoder and decoder\n\nIf you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages.\n\n### Writing a custom encoder and decoder\n\nIn this section, let’s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n[ROT13](https://en.wikipedia.org/wiki/ROT13) cipher.\n\n``` python\nimport codecs\nimport json\nfrom typing import Any, Type\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n```\n\nThe above code defines two custom functions for encoding and decoding\nmessages in a Kafka 
application using the FastKafka library.\n\nThe encoding function, `custom_encoder()`, takes a message `msg` which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe `json()` method, obfuscates the resulting string using the ROT13\nalgorithm from the `codecs` module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding.\n\nThe decoding function, `custom_decoder()`, takes a raw message `raw_msg`\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the `json.loads()` method and returns a\nnew instance of the specified `cls` class initialized with the decoded\ndictionary.\n\nThese functions can be used with FastKafka’s `encoder` and `decoder`\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics.\n\nLet’s test the above code\n\n``` python\ni = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n```\n\nThis will result in following output\n\n``` txt\nb'{\"frcny_yratgu\": 0.5, \"frcny_jvqgu\": 0.5, \"crgny_yratgu\": 0.5, \"crgny_jvqgu\": 0.5}'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n```\n\n### Assembling it all together\n\nLet’s rewrite the sample code found in\n[tutorial](/docs#running-the-service) to use our custom decoder and\nencoder functions:\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = 
load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\n\nimport codecs\nimport json\nfrom typing import Any, Type\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n    msg_str = msg.json()\n    obfuscated = codecs.encode(msg_str, 'rot13')\n    raw_bytes = obfuscated.encode(\"utf-8\")\n    return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n    obfuscated = raw_msg.decode(\"utf-8\")\n    msg_str = codecs.decode(obfuscated, 'rot13')\n    msg_dict = json.loads(msg_str)\n    return cls(**msg_dict)\n\n    \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", 
decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\nThis code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system.\n\nThe custom `encoder` function takes a message represented as a\n`BaseModel` and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned.\n\nThe custom `decoder` function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the `json`\nmodule. This dictionary is then converted to a `BaseModel` instance\nusing the cls parameter.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_11_Consumes_Basics.md",
    "content": "# @consumes basics\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@consumes` decorator to consume messages from Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will consume\n`HelloWorld` messages from hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\n\nTo use the `@consumes` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\nIn this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger.\n\n``` python\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to\nconsume from the topic using [pydantic](https://docs.pydantic.dev/). For\nthe guide we’ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n 
   }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a consumer function and decorate it with `@consumes`\n\nLet’s create a consumer function that will consume `HelloWorld` messages\nfrom *hello_world* topic and log them.\n\n``` python\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nThe function decorated with the `@consumes` decorator will be called\nwhen a message is produced to Kafka.\n\nThe message will then be injected into the typed *msg* argument of the\nfunction and its type will be used to parse the message.\n\nIn this example case, when the message is sent into a *hello_world*\ntopic, it will be parsed into a HelloWorld class and `on_hello_world`\nfunction will be called with the parsed class as *msg* argument value.\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n    [14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [14442]: 23-06-15 07:16:00.585 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\n    Starting process cleanup, this may take a few seconds...\n    23-06-15 07:16:04.626 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14442...\n    [14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-15 07:16:05.853 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14442 terminated.\n\n## Send the message to kafka topic\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:\n\n``` shell\necho { \\\"msg\\\": \\\"Hello world\\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\n    [15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n    [15588]: 23-06-15 07:16:15.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [15588]: 23-06-15 07:16:15.294 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [15588]: 23-06-15 07:16:15.295 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [15588]: 23-06-15 07:16:15.295 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [15588]: 23-06-15 07:16:15.302 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\n    [15588]: 23-06-15 07:16:25.867 [INFO] consumer_example: Got msg: msg='Hello world'\n    Starting process cleanup, this may take a few seconds...\n    23-06-15 07:16:34.168 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 15588...\n    [15588]: 23-06-15 07:16:35.358 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [15588]: 23-06-15 07:16:35.359 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-15 07:16:35.475 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 15588 terminated.\n\nYou should see the “Got msg: msg='Hello world'\" being logged by your\nconsumer.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are\nreceiving the message from, this is because the `@consumes` decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default “on\\_\" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n*hello_world*.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nconsumes decorator, like this:\n\n``` python\n@app.consumes(prefix=\"read_from_\")\nasync def read_from_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nAlso, you can define the topic name completely by defining the `topic`\nin parameter in consumes decorator, like this:\n\n``` python\n@app.consumes(topic=\"my_special_topic\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\n## Message data\n\nThe message received from kafka is translated from binary JSON\nrepresentation into the class defined by typing of *msg* parameter in the\nfunction decorated by the `@consumes` decorator.\n\nIn this example case, the message will be parsed into a `HelloWorld`\nclass.\n\n## Message metadata\n\nIf you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding a EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction.\n\nLet’s demonstrate that.\n\n### Create a consumer function with metadata\n\nThe only difference from the original basic consume function is that we\nare now passing the `meta: EventMetadata` argument to the function. The\n`@consumes` decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. 
Lets log it to see what it\ncontains.\n\nFirst, we need to import the EventMetadata\n\n``` python\nfrom fastkafka import EventMetadata\n```\n\nNow we can add the `meta` argument to our consuming function.\n\n``` python\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld, meta: EventMetadata):\n    logger.info(f\"Got metadata: {meta}\")\n```\n\nYour final app should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka import EventMetadata\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld, meta: EventMetadata):\n    logger.info(f\"Got metadata: {meta}\")\n```\n\nNow lets run the app and send a message to the broker to see the logged\nmessage metadata.\n\nYou should see a similar log as the one below and the metadata being\nlogged in your app.\n\n    [20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n    [20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: 
frozenset({'hello_world'})\n    [20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [20050]: 23-06-15 07:18:55.682 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n    [20050]: 23-06-15 07:19:06.337 [INFO] consumer_example: Got metadata: EventMetadata(topic='hello_world', partition=0, offset=0, timestamp=1686813546255, timestamp_type=0, key=None, value=b'{ \"msg\": \"Hello world\" }', checksum=None, serialized_key_size=-1, serialized_value_size=24, headers=())\n    Starting process cleanup, this may take a few seconds...\n    23-06-15 07:19:14.547 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 20050...\n    [20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-15 07:19:15.742 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20050 terminated.\n\nAs you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n:tada:\n\n## Dealing with high latency consuming functions\n\nIf your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consuming events. 
By default, the consume function will\nrun the consuming funtions for one topic sequentially, this is the most\nstraightforward approach and results with the least amount of overhead.\n\nBut, to handle those high latency tasks and run them in parallel,\nFastKafka has a\n[`DynamicTaskExecutor`](../api/fastkafka/executors/DynamicTaskExecutor.md#fastkafka.executors.DynamicTaskExecutor)\nprepared for your consumers. This executor comes with additional\noverhead, so use it only when you need to handle high latency functions.\n\nLets demonstrate how to use it.\n\nTo your consumes decorator, add an `executor` option and set it to\n`\"DynamicTaskExecutor\"`, this will enable the consumer to handle high\nlatency functions effectively.\n\nYour consuming function should now look like this:\n\n``` python\n@app.consumes(executor=\"DynamicTaskExecutor\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nAnd the complete app should now look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(executor=\"DynamicTaskExecutor\")\nasync def on_hello_world(msg: HelloWorld):\n    logger.info(f\"Got msg: {msg}\")\n```\n\nYou can now run your app using the CLI commands described in this guide.\n\nLets send a `HelloWorld` message to the *hello_world* topic and check if\nour consumer kafka application has logged the received message. 
In your\nterminal, run:\n\n``` shell\necho { \\\"msg\\\": \\\"Hello world\\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see a similar log as the one below.\n\n    [21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n    [21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n    [21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n    [21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [21539]: 23-06-15 07:19:25.154 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\n    [21539]: 23-06-15 07:19:35.512 [INFO] consumer_example: Got msg: msg='Hello world'\n    Starting process cleanup, this may take a few seconds...\n    23-06-15 07:19:44.023 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 21539...\n    [21539]: 23-06-15 07:19:45.202 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [21539]: 23-06-15 07:19:45.203 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-15 07:19:45.313 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 21539 terminated.\n\nInside the log, you should see the “Got msg: msg='Hello world'\" being\nlogged by your consumer.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_12_Batch_Consuming.md",
    "content": "# Batch consuming\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nIf you want to consume data in batches `@consumes` decorator makes that\npossible for you. By typing a consumed msg object as a `list` of\nmessages the consumer will call your consuming function with a batch of\nmessages consumed from a single partition. Let’s demonstrate that now.\n\n## Consume function with batching\n\nTo consume messages in batches, you need to wrap you message type into a\nlist and the `@consumes` decorator will take care of the rest for you.\nYour consumes function will be called with batches grouped by partition\nnow.\n\n``` python\n@app.consumes(auto_offset_reset=\"earliest\")\nasync def on_hello_world(msg: List[HelloWorld]):\n    logger.info(f\"Got msg batch: {msg}\")\n```\n\n## App example\n\nWe will modify the app example from [@consumes\nbasics](/docs/guides/Guide_11_Consumes_Basics.md) guide to consume\n`HelloWorld` messages batch. The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nimport asyncio\nfrom typing import List\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(auto_offset_reset=\"earliest\")\nasync def on_hello_world(msg: List[HelloWorld]):\n    logger.info(f\"Got msg batch: {msg}\")\n```\n\n## Send the messages to kafka topic\n\nLets send a 
couple of `HelloWorld` messages to the *hello_world* topic\nand check if our consumer kafka application has logged the received\nmessages batch. In your terminal, run the following command at least two\ntimes to create multiple messages in your kafka queue:\n\n    echo { ^\"msg^\": ^\"Hello world^\" }\n\n``` shell\necho { ^\"msg^\": ^\"Hello world^\" } | kafka-console-producer.bat --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nNow we can run the app. Copy the code of the example app in\nconsumer_example.py and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n```\n\nYou should see your Kafka messages being logged in batches by your\nconsumer.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_21_Produces_Basics.md",
    "content": "# @produces basics\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nYou can use `@produces` decorator to produce messages to Kafka topics.\n\nIn this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic.\n\n## Import [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\n\nTo use the `@produces` decorator, first we need to import the base\nFastKafka app to create our application.\n\n``` python\nfrom fastkafka import FastKafka\n```\n\n## Define the structure of the messages\n\nNext, you need to define the structure of the messages you want to send\nto the topic using [pydantic](https://docs.pydantic.dev/). For the guide\nwe’ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded.\n\nLet’s import `BaseModel` and `Field` from pydantic and create a simple\n`HelloWorld` class containing one string parameter `msg`\n\n``` python\nfrom pydantic import BaseModel, Field\n```\n\n``` python\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n```\n\n## Create a base FastKafka app\n\nNow we will create and define a base FastKafka app, replace the\n`<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values of your\nKafka bootstrap server\n\n``` python\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n```\n\n## Create a producer function and decorate it with `@produces`\n\nLet’s create a producer function that will produce `HelloWorld` messages\nto *hello_world* topic:\n\n``` python\n\n@app.produces()\nasync def 
to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nNow you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic.\n\nBy default, the topic is determined from your function name, the “to\\_\"\nprefix is stripped and what is left over is used as a topic name. In this\ncase, that is *hello_world*.\n\n## Instruct the app to start sending HelloWorld messages\n\nLet’s use `@run_in_background` decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second.\n\n``` python\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Final app\n\nYour app code should look like this:\n\n``` python\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\n``` python\nscript_file = \"producer_example.py\"\ncmd = \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\"\nmd(\n    f\"Now we can run the app. 
Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```\"\n)\n```\n\nNow we can run the app. Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    [84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n\n## Check if the message was sent to the Kafka topic\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n```\n\nYou should see the {“msg\": “Hello world!\"} messages in your topic.\n\n## Choosing a topic\n\nYou probably noticed that you didn’t define which topic you are sending\nthe message to, this is because the `@produces` decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default “to\\_\" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n*hello_world*.\n\n!!! warn \"New topics\"\n\n    Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.\n\nYou can choose your custom prefix by defining the `prefix` parameter in\nproduces decorator, like this:\n\n``` python\n\n@app.produces(prefix=\"send_to_\")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\nAlso, you can define the topic name completely by defining the `topic`\nin parameter in produces decorator, like this:\n\n``` python\n\n@app.produces(topic=\"my_special_topic\")\nasync def to_hello_world(msg: str) -> HelloWorld:\n    return HelloWorld(msg=msg)\n```\n\n## Message data\n\nThe return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app.\n\nIn this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. In the from of:\n`b'{\"msg\":\"Hello world!\"}'`\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_22_Partition_Keys.md",
    "content": "# Defining a partition key\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nPartition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance.\n\nYou can define your partition keys when using the `@produces` decorator,\nthis guide will demonstrate to you this feature.\n\n## Return a key from the producing function\n\nTo define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md#fastkafka.KafkaEvent)\nclass and set the key value. Check the example below:\n\n``` python\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n```\n\nIn the example, we want to return the `HelloWorld` message class with\nthe key defined as *my_key*. So, we wrap the message and key into a\nKafkaEvent class and return it as such.\n\nWhile generating the documentation, the\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md#fastkafka.KafkaEvent)\nclass will be unwrapped and the `HelloWorld` class will be documented in\nthe definition of message type, same way if you didn’t use the key.\n\n!!! info \"Which key to choose?\"\n\n    Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. 
Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n\n## App example\n\nWe will modify the app example from **@producer basics** guide to return\nthe `HelloWorld` with our key. The final app will look like this (make\nsure you replace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n    return KafkaEvent(HelloWorld(msg=msg), key=b\"my_key\")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n    while(True):\n        await to_hello_world(msg=\"Hello world!\")\n        await asyncio.sleep(1)\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n    [347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n    [347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n\n## Check if the message was sent to the Kafka topic with the desired key\n\nLets check the topic and see if there is a “Hello world!\" message in the\nhello_world topic with the defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the *my_key {“msg\": “Hello world!\"}* messages in your\ntopic appearing, the *my_key* part of the message is the key that we\ndefined in our producing function.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_23_Batch_Producing.md",
    "content": "# Batch producing\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nIf you want to send your data in batches `@produces` decorator makes\nthat possible for you. By returning a `list` of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker.\n\nThis guide will demonstrate how to use this feature.\n\n## Return a batch from the producing function\n\nTo define a batch that you want to produce to Kafka topic, you need to\nreturn the `List` of the messages that you want to be batched from your\nproducing function.\n\n``` python\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\nIn the example, we want to return the `HelloWorld` message class batch\nthat is created from a list of msgs we passed into our producing\nfunction.\n\nLets also prepare a backgound task that will send a batch of “hello\nworld\" messages when the app starts.\n\n``` python\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n```\n\n## App example\n\nWe will modify the app example from [@producer\nbasics](/docs/guides/Guide_21_Produces_Basics.md) guide to return the\n`HelloWorld` batch. 
The final app will look like this (make sure you\nreplace the `<url_of_your_kafka_bootstrap_server>` and\n`<port_of_your_kafka_bootstrap_server>` with the actual values):\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n    return [HelloWorld(msg=msg) for msg in msgs]\n```\n\n## Run the app\n\nNow we can run the app. 
Copy the code above in producer_example.py and\nrun it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n```\n\nAfter running the command, you should see this output in your terminal:\n\n    [46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n    [46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n    [46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n    [46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\n    Starting process cleanup, this may take a few seconds...\n    [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n    [46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n    [INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n\n## Check if the batch was sent to the Kafka topic with the defined key\n\nLets check the topic and see if there are “Hello world\" messages in the\nhello_world topic. 
In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages in your topic.\n\n## Batch key\n\nTo define a key for your batch like in [Defining a partition\nkey](/docs/guides/Guide_22_Partition_Keys.md) guide you can wrap the\nreturn value in a\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md#fastkafka.KafkaEvent)\nclass. To learn more about defining a partition key and\n[`KafkaEvent`](../api/fastkafka/KafkaEvent.md#fastkafka.KafkaEvent)\nclass, please, have a look at [Defining a partition\nkey](/docs/guides/Guide_22_Partition_Keys.md) guide.\n\nLet’s demonstrate that.\n\nTo define a key, we just need to modify our producing function, like\nthis:\n\n``` python\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\"my_key\")\n```\n\nNow our app looks like this:\n\n``` python\n\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n    msg: str = Field(\n        ...,\n        example=\"Hello\",\n        description=\"Demo hello world message\",\n    )\n\nkafka_brokers = {\n    \"demo_broker\": {\n        \"url\": \"<url_of_your_kafka_bootstrap_server>\",\n        \"description\": \"local demo kafka broker\",\n        \"port\": \"<port_of_your_kafka_bootstrap_server>\",\n    }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n    msgs=[f\"Hello world {i}\" for i in range(10)]\n    await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], 
key=b\"my_key\")\n```\n\n## Check if the batch was sent to the Kafka topic\n\nLets check the topic and see if there are “Hello world\" messages in the\nhello_world topic, containing a defined key. In your terminal run:\n\n``` shell\nkafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n```\n\nYou should see the batch of messages with the defined key in your topic.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",
    "content": "# Using multiple Kafka clusters\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nReady to take your FastKafka app to the next level? This guide shows you\nhow to connect to multiple Kafka clusters effortlessly. Consolidate\ntopics and produce messages across clusters like a pro. Unleash the full\npotential of your Kafka-powered app with FastKafka. Let’s dive in and\nelevate your application’s capabilities!\n\n### Test message\n\nTo showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called `TestMsg`. Here’s the\ndefinition of the `TestMsg` class:\n\n``` python\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n```\n\n## Defining multiple broker configurations\n\nWhen building a FastKafka application, you may need to consume messages\nfrom multiple Kafka clusters, each with its own set of broker\nconfigurations. FastKafka provides the flexibility to define different\nbroker clusters using the brokers argument in the consumes decorator.\nLet’s explore an example code snippet\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\n\nkafka_brokers_1 = dict(\n    development=dict(url=\"dev.server_1\", port=9092),\n    production=dict(url=\"prod.server_1\", port=9092),\n)\nkafka_brokers_2 = dict(\n    development=dict(url=\"dev.server_2\", port=9092),\n    production=dict(url=\"prod.server_1\", port=9092),\n)\n\napp = FastKafka(kafka_brokers=kafka_brokers_1, bootstrap_servers_id=\"development\")\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n    print(f\"Received on s1: {msg=}\")\n    await to_predictions_1(msg)\n\n\n@app.consumes(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n    print(f\"Received on s2: {msg=}\")\n    await 
to_predictions_2(msg)\n\n\n@app.produces(topic=\"predictions\")\nasync def to_predictions_1(msg: TestMsg) -> TestMsg:\n    return msg\n\n\n@app.produces(topic=\"predictions\", brokers=kafka_brokers_2)\nasync def to_predictions_2(msg: TestMsg) -> TestMsg:\n    return msg\n```\n\nIn this example, the application has two consumes endpoints, both of\nwhich will consume events from `preprocessed_signals` topic.\n`on_preprocessed_signals_1` will consume events from `kafka_brokers_1`\nconfiguration and `on_preprocessed_signals_2` will consume events from\n`kafka_brokers_2` configuration. When producing, `to_predictions_1` will\nproduce to `predictions` topic on `kafka_brokers_1` cluster and\n`to_predictions_2` will produce to `predictions` topic on\n`kafka_brokers_2` cluster.\n\n#### How it works\n\nThe `kafka_brokers_1` configuration represents the primary cluster,\nwhile `kafka_brokers_2` serves as an alternative cluster specified in\nthe decorator.\n\nUsing the FastKafka class, the app object is initialized with the\nprimary broker configuration (`kafka_brokers_1`). By default, the\n`@app.consumes` decorator without the brokers argument consumes messages\nfrom the `preprocessed_signals` topic on `kafka_brokers_1`.\n\nTo consume messages from a different cluster, the `@app.consumes`\ndecorator includes the `brokers` argument. This allows explicit\nspecification of the broker cluster in the `on_preprocessed_signals_2`\nfunction, enabling consumption from the same topic but using the\n`kafka_brokers_2` configuration.\n\nThe brokers argument can also be used in the @app.produces decorator to\ndefine multiple broker clusters for message production.\n\nIt’s important to ensure that all broker configurations have the same\nrequired settings as the primary cluster to ensure consistent behavior.\n\n## Testing the application\n\nTo test our FastKafka ‘mirroring’ application, we can use our testing\nframework. 
Lets take a look how it’s done:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\n    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\"signal_s1\"))\n    # Assert on_preprocessed_signals_1 consumed sent message\n    await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\n        TestMsg(msg=\"signal_s1\"), timeout=5\n    )\n    # Assert app has produced a prediction\n    await tester.mirrors[app.to_predictions_1].assert_called_with(\n        TestMsg(msg=\"signal_s1\"), timeout=5\n    )\n\n    # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\n    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\"signal_s2\"))\n    # Assert on_preprocessed_signals_2 consumed sent message\n    await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\n        TestMsg(msg=\"signal_s2\"), timeout=5\n    )\n    # Assert app has produced a prediction\n    await tester.mirrors[app.to_predictions_2].assert_called_with(\n        TestMsg(msg=\"signal_s2\"), timeout=5\n    )\n```\n\n    23-06-23 12:15:51.156 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-06-23 12:15:51.157 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-06-23 12:15:51.157 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n    23-06-23 12:15:51.158 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:15:51.158 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n    23-06-23 12:15:51.159 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 
12:15:51.178 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n    23-06-23 12:15:51.178 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:15:51.179 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n    23-06-23 12:15:51.180 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n    23-06-23 12:15:51.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:15:51.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-06-23 12:15:51.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:15:51.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:15:51.187 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n    23-06-23 12:15:51.187 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer 
patched start() called()\n    23-06-23 12:15:51.188 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:15:51.188 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:15:51.189 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n    23-06-23 12:15:51.190 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-06-23 12:15:51.191 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n    23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() 
called()\n    23-06-23 12:15:51.193 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:15:51.194 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-06-23 12:15:51.194 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    Received on s1: msg=TestMsg(msg='signal_s1')\n    Received on s2: msg=TestMsg(msg='signal_s2')\n    23-06-23 12:15:56.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:15:56.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:15:56.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:15:56.183 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:15:56.184 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:15:56.184 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:15:56.185 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 
12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:15:56.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:15:56.188 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nThe usage of the `tester.mirrors` dictionary allows specifying the\ndesired topic/broker combination for sending the test messages,\nespecially when working with multiple Kafka clusters. This ensures that\nthe data is sent to the appropriate topic/broker based on the consuming\nfunction, and consumed from appropriate topic/broker based on the\nproducing function.\n\n## Running the application\n\nYou can run your application using `fastkafka run` CLI command in the\nsame way that you would run a single cluster app.\n\nTo start your app, copy the code above in multi_cluster_example.py and\nrun it by running:\n\nNow we can run the app. 
Copy the code above in multi_cluster_example.py,\nadjust your server configurations, and run it by running\n\n``` shell\nfastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\n```\n\nIn your app logs, you should see your app starting up and your two\nconsumer functions connecting to different kafka clusters.\n\n    [182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24092'}\n    [182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24093'}\n    [182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n    [182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n    [182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n    [182747]: 23-06-23 12:16:14.136 [INFO] 
aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n    [182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n    [182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n    Starting process cleanup, this may take a few seconds...\n    23-06-23 12:16:18.294 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 182747...\n    [182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:19.471 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 182747 terminated.\n\n## Application documentation\n\nAt the moment the documentation for the multicluster app is not yet\nimplemented, but it is under development and you can expect it soon!\n\n## Examples on how to use multiple broker configurations\n\n### Example \\#1\n\nIn this section, we’ll explore how you can effectively forward topics\nbetween different Kafka clusters, enabling seamless data synchronization\nfor your applications.\n\nImagine having two Kafka clusters, namely `kafka_brokers_1` and\n`kafka_brokers_2`, each hosting its own set of topics and messages. 
Now,\nif you want to forward a specific topic (in this case:\n`preprocessed_signals`) from kafka_brokers_1 to kafka_brokers_2,\nFastKafka provides an elegant solution.\n\nLet’s examine the code snippet that configures our application for topic\nforwarding:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_original(msg: TestMsg):\n    await to_preprocessed_signals_forward(msg)\n\n\n@app.produces(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\n    return data\n```\n\nHere’s how it works: our FastKafka application is configured to consume\nmessages from `kafka_brokers_1` and process them in the\n`on_preprocessed_signals_original` function. We want to forward these\nmessages to `kafka_brokers_2`. To achieve this, we define the\n`to_preprocessed_signals_forward` function as a producer, seamlessly\nproducing the processed messages to the preprocessed_signals topic\nwithin the `kafka_brokers_2` cluster.\n\n#### Testing\n\nTo test our FastKafka forwarding application, we can use our testing\nframework. 
Let’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg=\"signal\"))\n    await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\n```\n\n    23-06-23 12:16:31.689 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-06-23 12:16:31.690 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-06-23 12:16:31.691 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-06-23 12:16:31.691 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:31.701 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-06-23 12:16:31.702 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:31.702 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:16:31.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-06-23 12:16:31.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:31.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-06-23 
12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:16:31.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-06-23 12:16:31.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:31.708 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:31.708 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:31.709 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-06-23 12:16:31.709 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:16:35.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:35.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:35.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:35.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:16:35.705 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:35.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:35.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer 
patched stop() called\n    23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nWith the help of the **Tester** object, we can simulate and verify the\nbehavior of our FastKafka application. Here’s how it works:\n\n1.  We create an instance of the **Tester** by passing in our *app*\n    object, which represents our FastKafka application.\n\n2.  Using the **tester.mirrors** dictionary, we can send a message to a\n    specific Kafka broker and topic combination. In this case, we use\n    `tester.mirrors[app.on_preprocessed_signals_original]` to send a\n    TestMsg message with the content “signal\" to the appropriate Kafka\n    broker and topic.\n\n3.  After sending the message, we can perform assertions on the mirrored\n    function using\n    `tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)`.\n    This assertion ensures that the mirrored function has been called\n    within a specified timeout period (in this case, 5 seconds).\n\n### Example \\#2\n\nIn this section, we’ll explore how you can effortlessly consume data\nfrom multiple sources, process it, and aggregate the results into a\nsingle topic on a specific cluster.\n\nImagine you have two Kafka clusters: **kafka_brokers_1** and\n**kafka_brokers_2**, each hosting its own set of topics and messages.\nNow, what if you want to consume data from both clusters, perform some\nprocessing, and produce the results to a single topic on\n**kafka_brokers_1**? 
FastKafka has got you covered!\n\nLet’s take a look at the code snippet that configures our application\nfor aggregating multiple clusters:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n    print(f\"Default: {msg=}\")\n    await to_predictions(msg)\n\n\n@app.consumes(topic=\"preprocessed_signals\", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n    print(f\"Specified: {msg=}\")\n    await to_predictions(msg)\n\n\n@app.produces(topic=\"predictions\")\nasync def to_predictions(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction: {prediction}\")\n    return [prediction]\n```\n\nHere’s the idea: our FastKafka application is set to consume messages\nfrom the topic “preprocessed_signals\" on **kafka_brokers_1** cluster, as\nwell as from the same topic on **kafka_brokers_2** cluster. We have two\nconsuming functions, `on_preprocessed_signals_1` and\n`on_preprocessed_signals_2`, that handle the messages from their\nrespective clusters. These functions perform any required processing, in\nthis case, just calling the to_predictions function.\n\nThe exciting part is that the to_predictions function acts as a\nproducer, sending the processed results to the “predictions\" topic on\n**kafka_brokers_1 cluster**. 
By doing so, we effectively aggregate the\ndata from multiple sources into a single topic on a specific cluster.\n\nThis approach enables you to consume data from multiple Kafka clusters,\nprocess it, and produce the aggregated results to a designated topic.\nWhether you’re generating predictions, performing aggregations, or any\nother form of data processing, FastKafka empowers you to harness the\nfull potential of multiple clusters.\n\n#### Testing\n\nLet’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\"signal\"))\n    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\"signal\"))\n    await tester.on_predictions.assert_called(timeout=5)\n```\n\n    23-06-23 12:16:41.222 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-06-23 12:16:41.223 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-06-23 12:16:41.224 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-06-23 12:16:41.224 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:41.239 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-06-23 12:16:41.239 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:41.240 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-06-23 12:16:41.240 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() 
starting...\n    23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-06-23 12:16:41.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:41.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-06-23 12:16:41.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    
23-06-23 12:16:41.248 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-06-23 12:16:41.248 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    Default: msg=TestMsg(msg='signal')\n    Sending prediction: msg='signal'\n    Specified: msg=TestMsg(msg='signal')\n    Sending prediction: msg='signal'\n    23-06-23 12:16:45.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:45.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:45.244 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:45.245 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:45.246 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:45.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:16:45.247 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nHere’s how the code above works:\n\n1.  Within an `async with` block, create an instance of the Tester by\n    passing in your app object, representing your FastKafka application.\n\n2.  Using the tester.mirrors dictionary, you can send messages to\n    specific Kafka broker and topic combinations. In this case, we use\n    `tester.mirrors[app.on_preprocessed_signals_1]` and\n    `tester.mirrors[app.on_preprocessed_signals_2]` to send TestMsg\n    messages with the content “signal\" to the corresponding Kafka broker\n    and topic combinations.\n\n3.  After sending the messages, you can perform assertions on the\n    **on_predictions** function using\n    `tester.on_predictions.assert_called(timeout=5)`. This assertion\n    ensures that the on_predictions function has been called within a\n    specified timeout period (in this case, 5 seconds).\n\n### Example \\#3\n\nIn some scenarios, you may need to produce messages to multiple Kafka\nclusters simultaneously. FastKafka simplifies this process by allowing\nyou to configure your application to produce messages to multiple\nclusters effortlessly. 
Let’s explore how you can achieve this:\n\nConsider the following code snippet that demonstrates producing messages\nto multiple clusters:\n\n``` python\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url=\"server_1\", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url=\"server_2\", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic=\"preprocessed_signals\")\nasync def on_preprocessed_signals(msg: TestMsg):\n    print(f\"{msg=}\")\n    await to_predictions_1(TestMsg(msg=\"prediction\"))\n    await to_predictions_2(TestMsg(msg=\"prediction\"))\n\n\n@app.produces(topic=\"predictions\")\nasync def to_predictions_1(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction to s1: {prediction}\")\n    return [prediction]\n\n\n@app.produces(topic=\"predictions\", brokers=kafka_brokers_2)\nasync def to_predictions_2(prediction: TestMsg) -> TestMsg:\n    print(f\"Sending prediction to s2: {prediction}\")\n    return [prediction]\n```\n\nHere’s what you need to know about producing to multiple clusters:\n\n1.  We define two Kafka broker configurations: **kafka_brokers_1** and\n    **kafka_brokers_2**, representing different clusters with their\n    respective connection details.\n\n2.  We create an instance of the FastKafka application, specifying\n    **kafka_brokers_1** as the primary cluster for producing messages.\n\n3.  The `on_preprocessed_signals` function serves as a consumer,\n    handling incoming messages from the “preprocessed_signals\" topic.\n    Within this function, we invoke two producer functions:\n    `to_predictions_1` and `to_predictions_2`.\n\n4.  The `to_predictions_1` function sends predictions to the\n    “predictions\" topic on *kafka_brokers_1* cluster.\n\n5.  
Additionally, the `to_predictions_2` function sends the same\n    predictions to the “predictions\" topic on *kafka_brokers_2* cluster.\n    This allows for producing the same data to multiple clusters\n    simultaneously.\n\nBy utilizing this approach, you can seamlessly produce messages to\nmultiple Kafka clusters, enabling you to distribute data across\ndifferent environments or leverage the strengths of various clusters.\n\nFeel free to customize the producer functions as per your requirements,\nperforming any necessary data transformations or enrichment before\nsending the predictions.\n\nWith FastKafka, producing to multiple clusters becomes a breeze,\nempowering you to harness the capabilities of multiple environments\neffortlessly.\n\n#### Testing\n\nLet’s take a look at the testing code snippet:\n\n``` python\nfrom fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n    await tester.to_preprocessed_signals(TestMsg(msg=\"signal\"))\n    await tester.mirrors[to_predictions_1].assert_called(timeout=5)\n    await tester.mirrors[to_predictions_2].assert_called(timeout=5)\n```\n\n    23-06-23 12:16:49.903 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    23-06-23 12:16:49.904 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    23-06-23 12:16:49.904 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-06-23 12:16:49.905 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:49.905 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n    23-06-23 12:16:49.906 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:49.921 [INFO] fastkafka._application.app: _create_producer() : 
created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n    23-06-23 12:16:49.921 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    23-06-23 12:16:49.921 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:16:49.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-06-23 12:16:49.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:49.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:49.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:49.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n    23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:16:49.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n    23-06-23 12:16:49.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:49.926 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n    23-06-23 12:16:49.928 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n    23-06-23 12:16:49.929 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    msg=TestMsg(msg='signal')\n    Sending prediction to s1: msg='prediction'\n    Sending prediction to s2: msg='prediction'\n    23-06-23 12:16:53.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:53.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:53.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:53.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    
23-06-23 12:16:53.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:16:53.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\nHere’s how you can perform the necessary tests:\n\n1.  Within an async with block, create an instance of the **Tester** by\n    passing in your app object, representing your FastKafka application.\n\n2.  Using the `tester.to_preprocessed_signals` method, you can send a\n    TestMsg message with the content “signal\".\n\n3.  After sending the message, you can perform assertions on the\n    to_predictions_1 and to_predictions_2 functions using\n    `tester.mirrors[to_predictions_1].assert_called(timeout=5)` and\n    `tester.mirrors[to_predictions_2].assert_called(timeout=5)`. These\n    assertions ensure that the respective producer functions have\n    produced data to their respective topic/broker combinations.\n\nBy employing this testing approach, you can verify that the producing\nfunctions correctly send messages to their respective clusters. The\ntesting framework provided by FastKafka enables you to ensure the\naccuracy and reliability of your application’s producing logic.\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",
    "content": "# Deploying FastKafka using Docker\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## Building a Docker Image\n\nTo build a Docker image for a FastKafka project, we need the following\nitems:\n\n1.  A library that is built using FastKafka.\n2.  A file in which the requirements are specified. This could be a\n    requirements.txt file, a setup.py file, or even a wheel file.\n3.  A Dockerfile to build an image that will include the two files\n    mentioned above.\n\n### Creating FastKafka Code\n\nLet’s create a\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)-based\napplication and write it to the `application.py` file based on the\n[tutorial](/docs#tutorial).\n\n``` python\n# content of the \"application.py\" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n\nfrom fastkafka import 
FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n### Creating requirements.txt file\n\nThe above code only requires FastKafka. So, we will add only that to the\n`requirements.txt` file, but you can add additional requirements to it\nas well.\n\n``` txt\nfastkafka>=0.3.0\n```\n\nHere we are using `requirements.txt` to store the project’s\ndependencies. However, other methods like `setup.py`, `pipenv`, and\n`wheel` files can also be used. `setup.py` is commonly used for\npackaging and distributing Python modules, while `pipenv` is a tool used\nfor managing virtual environments and package dependencies. 
`wheel`\nfiles are built distributions of Python packages that can be installed\nwith pip.\n\n### Creating Dockerfile\n\n``` dockerfile\n# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD [\"fastkafka\", \"run\", \"--num-workers\", \"2\", \"--kafka-broker\", \"production\", \"application:kafka_app\"]\n```\n\n1.  Start from the official Python base image.\n\n2.  Set the current working directory to `/project`.\n\n    This is where we’ll put the `requirements.txt` file and the\n    `application.py` file.\n\n3.  Copy the `application.py` file and `requirements.txt` file inside\n    the `/project` directory.\n\n4.  Install the package dependencies in the requirements file.\n\n    The `--no-cache-dir` option tells `pip` to not save the downloaded\n    packages locally, as that is only if `pip` was going to be run again\n    to install the same packages, but that’s not the case when working\n    with containers.\n\n    The `--upgrade` option tells `pip` to upgrade the packages if they\n    are already installed.\n\n5.  Set the **command** to run the `fastkafka run` command.\n\n    `CMD` takes a list of strings, each of these strings is what you\n    would type in the command line separated by spaces.\n\n    This command will be run from the **current working directory**, the\n    same `/project` directory you set above with `WORKDIR /project`.\n\n    We supply additional parameters `--num-workers` and `--kafka-broker`\n    for the run command. Finally, we specify the location of our\n    FastKafka application as a command argument.\n\n    To learn more about `fastkafka run` command please check the [CLI\n    docs](../../cli/fastkafka/#fastkafka-run).\n\n### Build the Docker Image\n\nNow that all the files are in place, let’s build the container image.\n\n1.  
Go to the project directory (where your `Dockerfile` is, containing\n    your `application.py` file).\n\n2.  Run the following command to build the image:\n\n    ``` cmd\n    docker build -t fastkafka_project_image .\n    ```\n\n    This command will create a docker image with the name\n    `fastkafka_project_image` and the `latest` tag.\n\nThat’s it! You have now built a docker image for your FastKafka project.\n\n### Start the Docker Container\n\nRun a container based on the built image:\n\n``` cmd\ndocker run -d --name fastkafka_project_container fastkafka_project_image\n```\n\n## Additional Security\n\n`Trivy` is an open-source tool that scans Docker images for\nvulnerabilities. It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here’s\nhow you can use `trivy` to scan your `fastkafka_project_image`:\n\n1.  Install `trivy` on your local machine by following the instructions\n    provided in the [official `trivy`\n    documentation](https://aquasecurity.github.io/trivy/latest/getting-started/installation/).\n\n2.  Run the following command to scan your fastkafka_project_image:\n\n    ``` cmd\n    trivy image fastkafka_project_image\n    ```\n\n    This command will scan your `fastkafka_project_image` for any\n    vulnerabilities and provide you with a report of its findings.\n\n3.  Fix any vulnerabilities identified by `trivy`. You can do this by\n    updating the vulnerable package to a more secure version or by using\n    a different package altogether.\n\n4.  
Rebuild your `fastkafka_project_image` and repeat steps 2 and 3\n    until `trivy` reports no vulnerabilities.\n\nBy using `trivy` to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities.\n\n## Example repo\n\nA\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nbased library which uses the above-mentioned Dockerfile to build a docker\nimage can be found\n[here](https://github.com/airtai/sample_fastkafka_project/)\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",
    "content": "# Using Redpanda to test FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n## What is FastKafka?\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n## What is Redpanda?\n\nRedpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda.\n\nFrom [redpanda.com](https://redpanda.com/):\n\n> Redpanda is a Kafka®-compatible streaming data platform that is proven\n> to be 10x faster and 6x lower in total costs. It is also JVM-free,\n> ZooKeeper®-free, Jepsen-tested and source available.\n\nSome of the advantages of Redpanda over Kafka are\n\n1.  A single binary with built-in everything, no ZooKeeper® or JVM\n    needed.\n2.  Costs upto 6X less than Kafka.\n3.  
Up to 10x lower average latencies and up to 6x faster Kafka\n    transactions without compromising correctness.\n\nTo learn more about Redpanda, please visit their\n[website](https://redpanda.com/) or checkout this [blog\npost](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\ncomparing Redpanda and Kafka’s performance benchmarks.\n\n## Example repo\n\nA sample FastKafka-based library that uses Redpanda for testing, based\non this guide, can be found\n[here](https://github.com/airtai/sample_fastkafka_with_redpanda).\n\n## The process\n\nHere are the steps we’ll be walking through to build our example:\n\n1.  Set up the prerequisites.\n2.  Clone the example repo.\n3.  Explain how to write an application using FastKafka.\n4.  Explain how to write a test case to test FastKafka with Redpanda.\n5.  Run the test case and produce/consume messages.\n\n## 1. Prerequisites\n\nBefore starting, make sure you have the following prerequisites set up:\n\n1.  **Python 3.x**: A Python 3.x installation is required to run\n    FastKafka. You can download the latest version of Python from the\n    [official website](https://www.python.org/downloads/). You’ll also\n    need to have pip installed and updated, which is Python’s package\n    installer.\n2.  **Docker Desktop**: Docker is used to run Redpanda, which is\n    required for testing FastKafka. You can download and install Docker\n    Desktop from the [official\n    website](https://www.docker.com/products/docker-desktop/).\n3.  **Git**: You’ll need to have Git installed to clone the example\n    repo. You can download Git from the [official\n    website](https://git-scm.com/downloads).\n\n## 2. 
Cloning and setting up the example repo\n\nTo get started with the example code, clone the [GitHub\nrepository](https://github.com/airtai/sample_fastkafka_with_redpanda) by\nrunning the following command in your terminal:\n\n``` cmd\ngit clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n```\n\nThis will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files.\n\n### Create a virtual environment\n\nBefore writing any code, let’s [create a new virtual\nenvironment](https://docs.python.org/3/library/venv.html#module-venv)\nfor our project.\n\nA virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between different projects.\n\nTo create a new virtual environment, run the following commands in your\nterminal:\n\n``` cmd\npython3 -m venv venv\n```\n\nThis will create a new directory called `venv` in your project\ndirectory, which will contain the virtual environment.\n\nTo activate the virtual environment, run the following command:\n\n``` cmd\nsource venv/bin/activate\n```\n\nThis will change your shell’s prompt to indicate that you are now\nworking inside the virtual environment.\n\nFinally, run the following command to upgrade `pip`, the Python package\ninstaller:\n\n``` cmd\npip install --upgrade pip\n```\n\n### Install Python dependencies\n\nNext, let’s install the required Python dependencies. In this guide,\nwe’ll be using\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nto write our application code and `pytest` and `pytest-asyncio` to test\nit.\n\nYou can install the dependencies from the `requirements.txt` file\nprovided in the cloned repository by running:\n\n``` cmd\npip install -r requirements.txt\n```\n\nThis will install all the required packages and their dependencies.\n\n## 3. 
Writing server code\n\nThe `application.py` file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:\n\n### Preparing the demo model\n\nFirst we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model.\n\nWe will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started.\n\n``` python\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n    # Load the ML model\n    X, y = load_iris(return_X_y=True)\n    ml_models[\"iris_predictor\"] = LogisticRegression(random_state=0, max_iter=500).fit(\n        X, y\n    )\n    yield\n    # Clean up the ML models and release the resources\n    ml_models.clear()\n```\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines two message classes for use in a FastKafka\napplication:\n\n- The `IrisInputData` class is used to represent input data for a\n  predictive model. 
It has four fields of type\n  [`NonNegativeFloat`](https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat),\n  which is a subclass of float that only allows non-negative floating\n  point values.\n\n- The `IrisPrediction` class is used to represent the output of the\n  predictive model. It has a single field `species` of type string\n  representing the predicted species.\n\nThese message classes will be used to parse and validate incoming data\nin Kafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n    sepal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal length in cm\"\n    )\n    sepal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Sepal width in cm\"\n    )\n    petal_length: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal length in cm\"\n    )\n    petal_width: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Petal width in cm\"\n    )\n\n\nclass IrisPrediction(BaseModel):\n    species: str = Field(..., example=\"setosa\", description=\"Predicted species\")\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker.\n\nNext, an instance of the\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nclass is initialized with the minimum required arguments:\n\n- `kafka_brokers`: a dictionary used for generating documentation\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Iris predictions\",\n    kafka_brokers=kafka_brokers,\n    lifespan=lifespan,\n)\n```\n\n### Function decorators\n\nFastKafka provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encode messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThis following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. 
The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `IrisInputData` message class. Specifying the\n  type of the single argument is instructing the Pydantic to use\n  `IrisInputData.parse_raw()` on the consumed message before passing it\n  to the user defined function `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_predictions` function,\n  which specifies that this function should produce a message to the\n  “predictions\" Kafka topic whenever it is called. The `to_predictions`\n  function takes a single integer argument `species_class` representing\n  one of three possible strign values predicted by the mdoel. It creates\n  a new `IrisPrediction` message using this value and then returns it.\n  The framework will call the `IrisPrediction.json().encode(\"utf-8\")`\n  function on the returned value and produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: IrisInputData):\n    species_class = ml_models[\"iris_predictor\"].predict(\n        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n    )[0]\n\n    await to_predictions(species_class)\n\n\n@kafka_app.produces(topic=\"predictions\")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n    iris_species = [\"setosa\", \"versicolor\", \"virginica\"]\n\n    prediction = IrisPrediction(species=iris_species[species_class])\n    return prediction\n```\n\n## 4. Writing the test code\n\nThe service can be tested using the\n[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\ninstance which can be configured to start a [Redpanda\nbroker](../../api/fastkafka/testing/LocalRedpandaBroker/) for testing\npurposes. 
The `test.py` file in the cloned repository contains the\nfollowing code for testing.\n\n``` python\nimport pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n    sepal_length=0.1,\n    sepal_width=0.2,\n    petal_length=0.3,\n    petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n    # Start Tester app and create local Redpanda broker for testing\n    async with Tester(kafka_app).using_local_redpanda(\n        tag=\"v23.1.2\", listener_port=9092\n    ) as tester:\n        # Send IrisInputData message to input_data topic\n        await tester.to_input_data(msg)\n\n        # Assert that the kafka_app responded with IrisPrediction in predictions topic\n        await tester.awaited_mocks.on_predictions.assert_awaited_with(\n            IrisPrediction(species=\"setosa\"), timeout=2\n        )\n```\n\nThe\n[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\nmodule utilizes uses\n[`LocalRedpandaBroker`](../api/fastkafka/testing/LocalRedpandaBroker.md#fastkafka.testing.LocalRedpandaBroker)\nto start and stop a Redpanda broker for testing purposes using Docker\n\n## 5. Running the tests\n\nWe can run the tests which is in `test.py` file by executing the\nfollowing command:\n\n``` cmd\npytest test.py\n```\n\nThis will start a Redpanda broker using Docker and executes tests. The\noutput of the command is:\n\n``` cmd\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item                                                                 \n\ntest.py .                                                                  
[100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n```\n\nRunning the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable.\n\n### Recap\n\nWe have created an Iris classification model and encapulated it into our\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napplication. The app will consume the `IrisInputData` from the\n`input_data` topic and produce the predictions to `predictions` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our\n    [`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\n    class with `Redpanda` broker which mirrors the developed app topics\n    for testing purposes\n\n3.  Sent `IrisInputData` message to `input_data` topic\n\n4.  Asserted and checked that the developed iris classification service\n    has reacted to `IrisInputData` message\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",
    "content": "# Using FastAPI to Run FastKafka Application\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\nWhen deploying a FastKafka application, the default approach is to\nutilize the [`fastkafka run`](/docs/cli/fastkafka#fastkafka-run) CLI\ncommand. This command allows you to launch your FastKafka application as\na standalone service. However, if you already have a FastAPI application\nin place and wish to run FastKafka application alongside it, you have an\nalternative option.\n\nFastKafka provides a method called\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md#fastkafka._application.app.FastKafka.fastapi_lifespan)\nthat leverages [FastAPI’s\nlifespan](https://fastapi.tiangolo.com/advanced/events/#lifespan-events)\nfeature. This method allows you to run your FastKafka application\ntogether with your existing FastAPI app, seamlessly integrating their\nfunctionalities. By using the\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod, you can start the FastKafka application within the same process\nas the FastAPI app.\n\nThe\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod ensures that both FastAPI and FastKafka are initialized and start\nworking simultaneously. This approach enables the execution of\nKafka-related tasks, such as producing and consuming messages, while\nalso handling HTTP requests through FastAPI’s routes.\n\nBy combining FastAPI and FastKafka in this manner, you can build a\ncomprehensive application that harnesses the power of both frameworks.\nWhether you require real-time messaging capabilities or traditional HTTP\nendpoints, this approach allows you to leverage the strengths of FastAPI\nand FastKafka within a single deployment setup.\n\n## Prerequisites\n\n1.  
A basic knowledge of\n    [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\n    is needed to proceed with this guide. If you are not familiar with\n    [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka),\n    please go through the [tutorial](/docs#tutorial) first.\n2.  [`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\n    and `FastAPI` libraries needs to be installed.\n\nThis guide will provide a step-by-step explanation, taking you through\neach stage individually, before combining all the components in the\nfinal section for a comprehensive understanding of the process.\n\n## 1. Basic FastKafka app\n\nIn this step, we will begin by creating a simple FastKafka application.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Greetings\",\n    kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n    await to_greetings(TestMsg(msg=f\"Hello {msg.msg}\"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n    return greeting\n```\n\nIn the above example, we consume messages from a topic called `names`,\nwe prepend “Hello\" to the message, and send it back to another topic\ncalled `greetings`.\n\nWe now have a simple\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napp to produce and consume from two topics.\n\n## 2. 
Using fastapi_lifespan method\n\nIn this step of the guide, we will explore the integration of a\nFastKafka application with a FastAPI application using the\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod. The\n[`FastKafka.fastapi_lifespan`](../api/fastkafka/FastKafka.md#fastkafka._application.app.FastKafka.fastapi_lifespan)\nmethod is a feature provided by FastKafka, which allows you to\nseamlessly integrate a FastKafka application with a FastAPI application\nby leveraging FastAPI’s lifespan feature.\n\n``` python\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name=\"localhost\"))\n\n\n@fastapi_app.get(\"/hello\")\nasync def hello():\n    return {\"msg\": \"hello there\"}\n```\n\nIn the above example, a new instance of the `FastAPI` app is created,\nand when the app is started using uvicorn, it also runs the\n[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\napplication concurrently.\n\n## Putting it all together\n\nLet’s put the above code together and write it in a file called\n`fast_apps.py`.\n\n``` python\n# content of the \"fast_apps.py\" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Greetings\",\n    kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n    msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n    await to_greetings(TestMsg(msg=f\"Hello 
{msg.msg}\"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n    return greeting\n\n\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(\"localhost\"))\n\n@fastapi_app.get(\"/hello\")\nasync def hello():\n    return {\"msg\": \"hello there\"}\n```\n\nFinally, you can run the FastAPI application using a web server of your\nchoice, such as Uvicorn or Hypercorn by running the below command:\n\n``` cmd\nuvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\n```\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/index.md",
    "content": "# FastKafka\n\n<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->\n\n<b>Effortless Kafka integration for your web services</b>\n\n------------------------------------------------------------------------\n\n![PyPI](https://img.shields.io/pypi/v/fastkafka.png) ![PyPI -\nDownloads](https://img.shields.io/pypi/dm/fastkafka.png) ![PyPI - Python\nVersion](https://img.shields.io/pypi/pyversions/fastkafka.png)\n\n![GitHub Workflow\nStatus](https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml)\n![CodeQL](https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg)\n![Dependency\nReview](https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg)\n\n![GitHub](https://img.shields.io/github/license/airtai/fastkafka.png)\n\n------------------------------------------------------------------------\n\n[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of [Pydantic](https://docs.pydantic.dev/),\n[AIOKafka](https://github.com/aio-libs/aiokafka) and\n[AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects.\n\n------------------------------------------------------------------------\n\n#### ⭐⭐⭐ Stay in touch ⭐⭐⭐\n\nPlease show your support and stay in touch by:\n\n- giving our [GitHub repository](https://github.com/airtai/fastkafka/) a\n  star, and\n\n- joining our [Discord server](https://discord.gg/CJWmYpyFbc).\n\nYour support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. Thank you for your\nsupport!\n\n------------------------------------------------------------------------\n\n#### 🐝🐝🐝 We were busy lately 🐝🐝🐝\n\n![Activity](https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg \"Repobeats analytics image\")\n\n## Install\n\nFastKafka works on Windows, macOS, Linux, and most Unix-style operating\nsystems. 
You can install base version of FastKafka with `pip` as usual:\n\n``` sh\npip install fastkafka\n```\n\nTo install FastKafka with testing features please use:\n\n``` sh\npip install fastkafka[test]\n```\n\nTo install FastKafka with asyncapi docs please use:\n\n``` sh\npip install fastkafka[docs]\n```\n\nTo install FastKafka with all the features please use:\n\n``` sh\npip install fastkafka[test,docs]\n```\n\n## Tutorial\n\nYou can start an interactive tutorial in Google Colab by clicking the\nbutton below:\n\n<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb\" target=\"_blank\">\n<img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open in Colab\" />\n</a>\n\n## Writing server code\n\nTo demonstrate FastKafka's simplicity of using `@produces` and `@consumes`\ndecorators, we will focus on a simple app.\n\nThe app will consume jsons containing positive floats from one topic, log\nthem and then produce incremented values to another topic.\n\n### Messages\n\nFastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic’s\n[`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages.\n\nThis example defines one `Data` message class. 
This Class will model the\nconsumed and produced data in our app demo, it contains one\n`NonNegativeFloat` field `data` that will be logged and “processed\"\nbefore being produced to another topic.\n\nThese message class will be used to parse and validate incoming data in\nKafka consumers and producers.\n\n``` python\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n```\n\n### Application\n\nThis example shows how to initialize a FastKafka application.\n\nIt starts by defining a dictionary called `kafka_brokers`, which\ncontains two entries: `\"localhost\"` and `\"production\"`, specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker.\n\nNext, an object of the\n[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\nclass is initialized with the minimum set of arguments:\n\n- `kafka_brokers`: a dictionary used for generation of documentation\n\nWe will also import and create a logger so that we can log the incoming\ndata in our consuming function.\n\n``` python\nfrom logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger(\"Demo Kafka app\")\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n```\n\n### Function decorators\n\nFastKafka 
provides convenient function decorators `@kafka_app.consumes`\nand `@kafka_app.produces` to allow you to delegate the actual process of\n\n- consuming and producing data to Kafka, and\n\n- decoding and encoding JSON encoded messages\n\nfrom user defined functions to the framework. The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries.\n\nThese decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration.\n\nThe following example shows how to use the `@kafka_app.consumes` and\n`@kafka_app.produces` decorators in a FastKafka application:\n\n- The `@kafka_app.consumes` decorator is applied to the `on_input_data`\n  function, which specifies that this function should be called whenever\n  a message is received on the “input_data\" Kafka topic. The\n  `on_input_data` function takes a single argument which is expected to\n  be an instance of the `Data` message class. Specifying the type of the\n  single argument is instructing Pydantic to use `Data.parse_raw()`\n  on the consumed message before passing it to the user defined function\n  `on_input_data`.\n\n- The `@produces` decorator is applied to the `to_output_data` function,\n  which specifies that this function should produce a message to the\n  “output_data\" Kafka topic whenever it is called. The `to_output_data`\n  function takes a single float argument `data`. It increments the\n  data and returns it wrapped in a `Data` object. 
The framework will call\n  the `Data.json().encode(\"utf-8\")` function on the returned value and\n  produce it to the specified topic.\n\n``` python\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\n## Testing the service\n\nThe service can be tested using the\n[`Tester`](./api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\ninstances which internally starts InMemory implementation of Kafka\nbroker.\n\nThe Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies.\n\n``` python\nfrom fastkafka.testing import Tester\n\nmsg = Data(\n    data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n    # Send Data message to input_data topic\n    await tester.to_input_data(msg)\n\n    # Assert that the kafka_app responded with incremented data in output_data topic\n    await tester.awaited_mocks.on_output_data.assert_awaited_with(\n        Data(data=1.1), timeout=2\n    )\n```\n\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n    [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._application.app: _create_producer() : 
created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [INFO] Demo Kafka app: Got data: 0.1\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n    [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n\n### Recap\n\nWe have created a simple FastKafka application. The app will consume the\n`Data` from the `input_data` topic, log it and produce the incremented\ndata to `output_data` topic.\n\nTo test the app we have:\n\n1.  Created the app\n\n2.  Started our Tester class which mirrors the developed app topics for\n    testing purposes\n\n3.  Sent Data message to `input_data` topic\n\n4.  
Asserted and checked that the developed service has reacted to Data\n    message\n\n## Running the service\n\nThe service can be started using the built-in fastkafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file `\"application.py\"`\n\n``` python\n# content of the \"application.py\" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n    data: NonNegativeFloat = Field(\n        ..., example=0.5, description=\"Float data example\"\n    )\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic=\"input_data\", auto_offset_reset=\"latest\")\nasync def on_input_data(msg: Data):\n    logger.info(f\"Got data: {msg.data}\")\n    await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic=\"output_data\")\nasync def to_output_data(data: float) -> Data:\n    processed_data = Data(data=data+1.0)\n    return processed_data\n```\n\nTo run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp symbol to the command.\n\n``` sh\nfastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n```\n\nAfter running the command, you should see the following output in your\ncommand line:\n\n    [1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting 
bootstrap_servers value to 'localhost:9092'\n    [1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n    [1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n    [1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n    [1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n    [1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n    [1506]: 23-05-31 11:36:46.042 [INFO] 
aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n    [1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n    [1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n    [1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n    [1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n    [1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n    [1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    [1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    Starting process cleanup, this may take a few seconds...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n    23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n    [1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n    
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n    23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n    23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n\n## Documentation\n\nThe kafka app comes with builtin documentation generation using\n[AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\n\nAsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:\n\n``` sh\nfastkafka docs install_deps\n```\n\n    23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n\nTo generate the documentation programatically you just need to call the\nfollowing command:\n\n``` sh\nfastkafka docs generate application:kafka_app\n```\n\n    23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n    23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\nThis will generate the *asyncapi* folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:\n\n``` sh\nls -l asyncapi\n```\n\n    total 8\n    drwxr-xr-x 4 root root 4096 May 31 11:38 docs\n    drwxr-xr-x 2 root root 4096 May 31 11:38 spec\n\nIn docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our `fastkafka docs serve`\nCLI command (more on that in our guides).\n\nIn spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application.\n\nWe can locally preview the generated documentation by running the\nfollowing command:\n\n``` sh\nfastkafka docs serve application:kafka_app\n```\n\n    23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n    23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n    23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\n    Done! ✨\n    Check out your shiny new generated files at /content/asyncapi/docs.\n\n\n    Serving documentation on http://127.0.0.1:8000\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET / HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/global.min.css HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /js/asyncapi-ui.min.js HTTP/1.1\" 200 -\n    127.0.0.1 - - [31/May/2023 11:39:14] \"GET /css/asyncapi.min.css HTTP/1.1\" 200 -\n    Interupting serving of documentation and cleaning up...\n\nFrom the parameters passed to the application constructor, we get the\ndocumentation bellow:\n\n``` python\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n    \"localhost\": {\n        \"url\": \"localhost\",\n        \"description\": \"local development kafka broker\",\n        \"port\": 9092,\n    },\n    \"production\": {\n        \"url\": \"kafka.airt.ai\",\n        \"description\": \"production kafka broker\",\n        \"port\": 9092,\n        \"protocol\": \"kafka-secure\",\n        \"security\": {\"type\": \"plain\"},\n    },\n}\n\nkafka_app = FastKafka(\n    title=\"Demo Kafka app\",\n    
kafka_brokers=kafka_brokers,\n)\n```\n\n![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\n\nThe following documentation snippet are for the consumer as specified in\nthe code above:\n\n![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\n\nThe following documentation snippet are for the producer as specified in\nthe code above:\n\n![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\n\nFinally, all messages as defined as subclasses of *BaseModel* are\ndocumented as well:\n\n![Kafka\\_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\n\n## License\n\nFastKafka is licensed under the Apache License 2.0\n\nA permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code.\n\nThe full text of the license can be found\n[here](https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE).\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/overrides/css/extra.css",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/overrides/js/extra.js",
    "content": ""
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/overrides/js/math.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_docs/version-0.8.0/overrides/js/mathjax.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "docusaurus/versioned_sidebars/version-0.5.0-sidebars.json",
    "content": "{\n  \"tutorialSidebar\": [\n    \"index\",\n    {\n      \"Guides\": [\n        {\n          \"Writing services\": [\n            \"guides/Guide_11_Consumes_Basics\",\n            \"guides/Guide_21_Produces_Basics\",\n            \"guides/Guide_22_Partition_Keys\",\n            \"guides/Guide_05_Lifespan_Handler\",\n            \"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka\"\n          ]\n        },\n        {\n          \"Testing\": [\n            \"guides/Guide_31_Using_redpanda_to_test_fastkafka\"\n          ]\n        },\n        {\n          \"Documentation generation\": [\n            \"guides/Guide_04_Github_Actions_Workflow\"\n          ]\n        },\n        {\n          \"Deployment\": [\n            \"guides/Guide_30_Using_docker_to_deploy_fastkafka\"\n          ]\n        },\n        {\n          \"Benchmarking\": [\n            \"guides/Guide_06_Benchmarking_FastKafka\"\n          ]\n        }\n      ]\n    },\n    {\n      \"items\": [\n        \"api/fastkafka/FastKafka\",\n        \"api/fastkafka/KafkaEvent\",\n        {\n          \"items\": [\n            \"api/fastkafka/testing/ApacheKafkaBroker\",\n            \"api/fastkafka/testing/LocalRedpandaBroker\",\n            \"api/fastkafka/testing/Tester\"\n          ],\n          \"label\": \"testing\",\n          \"type\": \"category\"\n        }\n      ],\n      \"label\": \"API\",\n      \"type\": \"category\"\n    },\n    {\n      \"CLI\": [\n        \"cli/fastkafka\",\n        \"cli/run_fastkafka_server_process\"\n      ]\n    },\n    \"CHANGELOG\"\n  ]\n}\n"
  },
  {
    "path": "docusaurus/versioned_sidebars/version-0.6.0-sidebars.json",
    "content": "{\n  \"tutorialSidebar\": [\n    \"index\",\n    {\n      \"Guides\": [\n        {\n          \"Writing services\": [\n            \"guides/Guide_11_Consumes_Basics\",\n            \"guides/Guide_21_Produces_Basics\",\n            \"guides/Guide_22_Partition_Keys\",\n            \"guides/Guide_23_Batch_Producing\",\n            \"guides/Guide_05_Lifespan_Handler\",\n            \"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka\"\n          ]\n        },\n        {\n          \"Testing\": [\n            \"guides/Guide_31_Using_redpanda_to_test_fastkafka\"\n          ]\n        },\n        {\n          \"Documentation generation\": [\n            \"guides/Guide_04_Github_Actions_Workflow\"\n          ]\n        },\n        {\n          \"Deployment\": [\n            \"guides/Guide_30_Using_docker_to_deploy_fastkafka\"\n          ]\n        },\n        {\n          \"Benchmarking\": [\n            \"guides/Guide_06_Benchmarking_FastKafka\"\n          ]\n        }\n      ]\n    },\n    {\n      \"API\": [\n        \"api/fastkafka/EventMetadata\",\n        \"api/fastkafka/FastKafka\",\n        \"api/fastkafka/KafkaEvent\",\n        {\n          \"encoder\": [\n            \"api/fastkafka/encoder/AvroBase\",\n            \"api/fastkafka/encoder/avro_decoder\",\n            \"api/fastkafka/encoder/avro_encoder\",\n            \"api/fastkafka/encoder/avsc_to_pydantic\",\n            \"api/fastkafka/encoder/json_decoder\",\n            \"api/fastkafka/encoder/json_encoder\"\n          ]\n        },\n        {\n          \"testing\": [\n            \"api/fastkafka/testing/ApacheKafkaBroker\",\n            \"api/fastkafka/testing/LocalRedpandaBroker\",\n            \"api/fastkafka/testing/Tester\"\n          ]\n        }\n      ]\n    },\n    {\n      \"CLI\": [\n        \"cli/fastkafka\",\n        \"cli/run_fastkafka_server_process\"\n      ]\n    },\n    \"LICENSE\",\n    \"CONTRIBUTING\",\n    \"CHANGELOG\"\n  ]\n}\n"
  },
  {
    "path": "docusaurus/versioned_sidebars/version-0.7.0-sidebars.json",
    "content": "{\n  \"tutorialSidebar\": [\n    \"index\",\n    {\n      \"Guides\": [\n        {\n          \"Writing services\": [\n            \"guides/Guide_11_Consumes_Basics\",\n            \"guides/Guide_12_Batch_Consuming\",\n            \"guides/Guide_21_Produces_Basics\",\n            \"guides/Guide_22_Partition_Keys\",\n            \"guides/Guide_23_Batch_Producing\",\n            \"guides/Guide_05_Lifespan_Handler\",\n            \"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka\",\n            \"guides/Guide_24_Using_Multiple_Kafka_Clusters\"\n          ]\n        },\n        {\n          \"Testing\": [\n            \"guides/Guide_31_Using_redpanda_to_test_fastkafka\"\n          ]\n        },\n        {\n          \"Documentation generation\": [\n            \"guides/Guide_04_Github_Actions_Workflow\"\n          ]\n        },\n        {\n          \"Deployment\": [\n            \"guides/Guide_30_Using_docker_to_deploy_fastkafka\",\n            \"guides/Guide_32_Using_fastapi_to_run_fastkafka_application\"\n          ]\n        },\n        {\n          \"Benchmarking\": [\n            \"guides/Guide_06_Benchmarking_FastKafka\"\n          ]\n        }\n      ]\n    },\n    {\n      \"API\": [\n        \"api/fastkafka/EventMetadata\",\n        \"api/fastkafka/FastKafka\",\n        \"api/fastkafka/KafkaEvent\",\n        {\n          \"encoder\": [\n            \"api/fastkafka/encoder/AvroBase\",\n            \"api/fastkafka/encoder/avro_decoder\",\n            \"api/fastkafka/encoder/avro_encoder\",\n            \"api/fastkafka/encoder/avsc_to_pydantic\",\n            \"api/fastkafka/encoder/json_decoder\",\n            \"api/fastkafka/encoder/json_encoder\"\n          ]\n        },\n        {\n          \"executors\": [\n            \"api/fastkafka/executors/DynamicTaskExecutor\",\n            \"api/fastkafka/executors/SequentialExecutor\"\n          ]\n        },\n        {\n          \"testing\": [\n            
\"api/fastkafka/testing/ApacheKafkaBroker\",\n            \"api/fastkafka/testing/LocalRedpandaBroker\",\n            \"api/fastkafka/testing/Tester\"\n          ]\n        }\n      ]\n    },\n    {\n      \"CLI\": [\n        \"cli/fastkafka\",\n        \"cli/run_fastkafka_server_process\"\n      ]\n    },\n    \"LICENSE\",\n    \"CONTRIBUTING\",\n    \"CHANGELOG\"\n  ]\n}\n"
  },
  {
    "path": "docusaurus/versioned_sidebars/version-0.7.1-sidebars.json",
    "content": "{\n  \"tutorialSidebar\": [\n    \"index\",\n    {\n      \"Guides\": [\n        {\n          \"Writing services\": [\n            \"guides/Guide_11_Consumes_Basics\",\n            \"guides/Guide_12_Batch_Consuming\",\n            \"guides/Guide_21_Produces_Basics\",\n            \"guides/Guide_22_Partition_Keys\",\n            \"guides/Guide_23_Batch_Producing\",\n            \"guides/Guide_05_Lifespan_Handler\",\n            \"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka\",\n            \"guides/Guide_24_Using_Multiple_Kafka_Clusters\"\n          ]\n        },\n        {\n          \"Testing\": [\n            \"guides/Guide_31_Using_redpanda_to_test_fastkafka\"\n          ]\n        },\n        {\n          \"Documentation generation\": [\n            \"guides/Guide_04_Github_Actions_Workflow\"\n          ]\n        },\n        {\n          \"Deployment\": [\n            \"guides/Guide_30_Using_docker_to_deploy_fastkafka\",\n            \"guides/Guide_32_Using_fastapi_to_run_fastkafka_application\"\n          ]\n        },\n        {\n          \"Benchmarking\": [\n            \"guides/Guide_06_Benchmarking_FastKafka\"\n          ]\n        }\n      ]\n    },\n    {\n      \"API\": [\n        \"api/fastkafka/EventMetadata\",\n        \"api/fastkafka/FastKafka\",\n        \"api/fastkafka/KafkaEvent\",\n        {\n          \"encoder\": [\n            \"api/fastkafka/encoder/AvroBase\",\n            \"api/fastkafka/encoder/avro_decoder\",\n            \"api/fastkafka/encoder/avro_encoder\",\n            \"api/fastkafka/encoder/avsc_to_pydantic\",\n            \"api/fastkafka/encoder/json_decoder\",\n            \"api/fastkafka/encoder/json_encoder\"\n          ]\n        },\n        {\n          \"executors\": [\n            \"api/fastkafka/executors/DynamicTaskExecutor\",\n            \"api/fastkafka/executors/SequentialExecutor\"\n          ]\n        },\n        {\n          \"testing\": [\n            
\"api/fastkafka/testing/ApacheKafkaBroker\",\n            \"api/fastkafka/testing/LocalRedpandaBroker\",\n            \"api/fastkafka/testing/Tester\"\n          ]\n        }\n      ]\n    },\n    {\n      \"CLI\": [\n        \"cli/fastkafka\",\n        \"cli/run_fastkafka_server_process\"\n      ]\n    },\n    \"LICENSE\",\n    \"CONTRIBUTING\",\n    \"CHANGELOG\"\n  ]\n}\n"
  },
  {
    "path": "docusaurus/versioned_sidebars/version-0.8.0-sidebars.json",
    "content": "{\n  \"tutorialSidebar\": [\n    \"index\",\n    {\n      \"Guides\": [\n        {\n          \"Writing services\": [\n            \"guides/Guide_11_Consumes_Basics\",\n            \"guides/Guide_12_Batch_Consuming\",\n            \"guides/Guide_21_Produces_Basics\",\n            \"guides/Guide_22_Partition_Keys\",\n            \"guides/Guide_23_Batch_Producing\",\n            \"guides/Guide_05_Lifespan_Handler\",\n            \"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka\",\n            \"guides/Guide_24_Using_Multiple_Kafka_Clusters\"\n          ]\n        },\n        {\n          \"Testing\": [\n            \"guides/Guide_31_Using_redpanda_to_test_fastkafka\"\n          ]\n        },\n        {\n          \"Documentation generation\": [\n            \"guides/Guide_04_Github_Actions_Workflow\"\n          ]\n        },\n        {\n          \"Deployment\": [\n            \"guides/Guide_30_Using_docker_to_deploy_fastkafka\",\n            \"guides/Guide_32_Using_fastapi_to_run_fastkafka_application\"\n          ]\n        },\n        {\n          \"Benchmarking\": [\n            \"guides/Guide_06_Benchmarking_FastKafka\"\n          ]\n        }\n      ]\n    },\n    {\n      \"API\": [\n        \"api/fastkafka/EventMetadata\",\n        \"api/fastkafka/FastKafka\",\n        \"api/fastkafka/KafkaEvent\",\n        {\n          \"encoder\": [\n            \"api/fastkafka/encoder/AvroBase\",\n            \"api/fastkafka/encoder/avro_decoder\",\n            \"api/fastkafka/encoder/avro_encoder\",\n            \"api/fastkafka/encoder/avsc_to_pydantic\",\n            \"api/fastkafka/encoder/json_decoder\",\n            \"api/fastkafka/encoder/json_encoder\"\n          ]\n        },\n        {\n          \"executors\": [\n            \"api/fastkafka/executors/DynamicTaskExecutor\",\n            \"api/fastkafka/executors/SequentialExecutor\"\n          ]\n        },\n        {\n          \"testing\": [\n            
\"api/fastkafka/testing/ApacheKafkaBroker\",\n            \"api/fastkafka/testing/LocalRedpandaBroker\",\n            \"api/fastkafka/testing/Tester\"\n          ]\n        }\n      ]\n    },\n    {\n      \"CLI\": [\n        \"cli/fastkafka\",\n        \"cli/run_fastkafka_server_process\"\n      ]\n    },\n    \"LICENSE\",\n    \"CONTRIBUTING\",\n    \"CHANGELOG\"\n  ]\n}\n"
  },
  {
    "path": "docusaurus/versions.json",
    "content": "[\n  \"0.8.0\",\n  \"0.7.1\",\n  \"0.7.0\",\n  \"0.6.0\",\n  \"0.5.0\"\n]\n"
  },
  {
    "path": "fastkafka/__init__.py",
    "content": "__version__ = \"0.9.0rc0\"\n# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/010_Application_export.ipynb.\n\n# %% auto 0\n__all__ = ['dummy']\n\n# %% ../nbs/010_Application_export.ipynb 1\nfrom ._application.app import FastKafka\nfrom ._components.meta import export\nfrom ._components.producer_decorator import KafkaEvent\nfrom ._components.aiokafka_consumer_loop import EventMetadata\n\n__all__ = [\"FastKafka\", \"KafkaEvent\", \"EventMetadata\"]\n\n# %% ../nbs/010_Application_export.ipynb 2\n@export(\"_dummy\")\ndef dummy() -> None:\n    pass\n"
  },
  {
    "path": "fastkafka/_aiokafka_imports.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/000_AIOKafkaImports.ipynb.\n\n# %% auto 0\n__all__ = ['dummy']\n\n# %% ../nbs/000_AIOKafkaImports.ipynb 1\nfrom aiokafka import AIOKafkaConsumer, AIOKafkaProducer\n\nfrom ._components.meta import export\n\n__all__ = [\n    \"AIOKafkaConsumer\",\n    \"AIOKafkaProducer\",\n]\n\n# %% ../nbs/000_AIOKafkaImports.ipynb 2\n@export(\"_dummy\")\ndef dummy() -> None:\n    pass\n"
  },
  {
    "path": "fastkafka/_application/__init__.py",
    "content": ""
  },
  {
    "path": "fastkafka/_application/app.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/015_FastKafka.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'FastKafka', 'AwaitedMock']\n\n# %% ../../nbs/015_FastKafka.ipynb 1\nimport asyncio\nimport functools\nimport inspect\nimport json\nimport types\nfrom asyncio import iscoroutinefunction  # do not use the version from inspect\nfrom collections import namedtuple\nfrom contextlib import asynccontextmanager\nfrom copy import deepcopy\nfrom datetime import datetime, timedelta\nfrom functools import wraps\nfrom inspect import signature\nfrom pathlib import Path\nfrom typing import *\nfrom unittest.mock import AsyncMock, MagicMock\n\nimport anyio\nfrom pydantic import BaseModel\n\nfrom fastkafka._components.aiokafka_consumer_loop import (\n    aiokafka_consumer_loop,\n    sanitize_kafka_config,\n)\nfrom fastkafka._components.asyncapi import (\n    ConsumeCallable,\n    ContactInfo,\n    KafkaBroker,\n    KafkaBrokers,\n    KafkaServiceInfo,\n    export_async_spec,\n)\n\nimport fastkafka._aiokafka_imports\nfrom .._aiokafka_imports import AIOKafkaConsumer, AIOKafkaProducer\nfrom .._components.benchmarking import _benchmark\nfrom .._components.logger import get_logger\nfrom .._components.meta import delegates, export, filter_using_signature, patch\nfrom .._components.producer_decorator import ProduceCallable, producer_decorator\nfrom .._components.task_streaming import StreamExecutor\nfrom .._components.helpers import remove_suffix\n\n# %% ../../nbs/015_FastKafka.ipynb 2\nif TYPE_CHECKING:\n    from fastapi import FastAPI\n\n# %% ../../nbs/015_FastKafka.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../nbs/015_FastKafka.ipynb 9\n@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer, but=[\"bootstrap_servers\"])\n@delegates(\n    fastkafka._aiokafka_imports.AIOKafkaProducer, but=[\"bootstrap_servers\"], keep=True\n)\ndef _get_kafka_config(\n    bootstrap_servers_id: str = \"localhost\",\n    **kwargs: Any,\n) -> Dict[str, Any]:\n    \"\"\"Get 
kafka config\"\"\"\n    allowed_keys = set(signature(_get_kafka_config).parameters.keys())\n    if not set(kwargs.keys()) <= allowed_keys:\n        unallowed_keys = \", \".join(\n            sorted([f\"'{x}'\" for x in set(kwargs.keys()).difference(allowed_keys)])\n        )\n        raise ValueError(f\"Unallowed key arguments passed: {unallowed_keys}\")\n    retval = kwargs.copy()\n\n    # todo: check this values\n    config_defaults = {\n        \"bootstrap_servers_id\": bootstrap_servers_id,\n        \"auto_offset_reset\": \"earliest\",\n        \"max_poll_records\": 100,\n    }\n    for key, value in config_defaults.items():\n        if key not in retval:\n            retval[key] = value\n\n    return retval\n\n# %% ../../nbs/015_FastKafka.ipynb 12\ndef _get_kafka_brokers(\n    kafka_brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = None\n) -> KafkaBrokers:\n    \"\"\"Get Kafka brokers\n\n    Args:\n        kafka_brokers: Kafka brokers\n\n    \"\"\"\n    if kafka_brokers is None:\n        retval: KafkaBrokers = KafkaBrokers(\n            brokers={\n                \"localhost\": KafkaBroker(  # type: ignore\n                    url=\"https://localhost\",\n                    description=\"Local (dev) Kafka broker\",\n                    port=\"9092\",\n                    grouping=\"localhost\",\n                )\n            }\n        )\n    else:\n        if isinstance(kafka_brokers, KafkaBrokers):\n            return kafka_brokers\n\n        retval = KafkaBrokers(\n            brokers={\n                k: (\n                    [\n                        KafkaBroker.model_validate_json(\n                            unwrapped_v.model_dump_json()\n                            if hasattr(unwrapped_v, \"model_dump_json\")\n                            else json.dumps(unwrapped_v)\n                        )\n                        for unwrapped_v in v\n                    ]\n                    if isinstance(v, list)\n                    else 
KafkaBroker.model_validate_json(\n                        v.model_dump_json()\n                        if hasattr(v, \"model_dump_json\")\n                        else json.dumps(v)\n                    )\n                )\n                for k, v in kafka_brokers.items()\n            }\n        )\n\n    return retval\n\n# %% ../../nbs/015_FastKafka.ipynb 14\ndef _get_broker_addr_list(\n    brokers: Union[List[KafkaBroker], KafkaBroker]\n) -> Union[str, List[str]]:\n    if isinstance(brokers, list):\n        return [f\"{broker.url}:{broker.port}\" for broker in brokers]\n    else:\n        return f\"{brokers.url}:{brokers.port}\"\n\n# %% ../../nbs/015_FastKafka.ipynb 16\ndef _get_topic_name(\n    topic_callable: Union[ConsumeCallable, ProduceCallable], prefix: str = \"on_\"\n) -> str:\n    \"\"\"Get topic name\n    Args:\n        topic_callable: a function\n        prefix: prefix of the name of the function followed by the topic name\n\n    Returns:\n        The name of the topic\n    \"\"\"\n    topic = topic_callable.__name__\n    if not topic.startswith(prefix) or len(topic) <= len(prefix):\n        raise ValueError(f\"Function name '{topic}' must start with {prefix}\")\n    topic = topic[len(prefix) :]\n\n    return topic\n\n# %% ../../nbs/015_FastKafka.ipynb 18\ndef _get_contact_info(\n    name: str = \"Author\",\n    url: str = \"https://www.google.com\",\n    email: str = \"noreply@gmail.com\",\n) -> ContactInfo:\n    return ContactInfo(name=name, url=url, email=email)  # type: ignore\n\n# %% ../../nbs/015_FastKafka.ipynb 20\nI = TypeVar(\"I\", bound=BaseModel)\nO = TypeVar(\"O\", BaseModel, Awaitable[BaseModel])\n\nF = TypeVar(\"F\", bound=Callable)\n\n# %% ../../nbs/015_FastKafka.ipynb 21\n@export(\"fastkafka\")\nclass FastKafka:\n    @delegates(_get_kafka_config)\n    def __init__(\n        self,\n        *,\n        title: Optional[str] = None,\n        description: Optional[str] = None,\n        version: Optional[str] = None,\n        contact: 
Optional[Dict[str, str]] = None,\n        kafka_brokers: Optional[Dict[str, Any]] = None,\n        root_path: Optional[Union[Path, str]] = None,\n        lifespan: Optional[Callable[[\"FastKafka\"], AsyncContextManager[None]]] = None,\n        **kwargs: Any,\n    ):\n        \"\"\"Creates FastKafka application\n\n        Args:\n            title: optional title for the documentation. If None,\n                the title will be set to empty string\n            description: optional description for the documentation. If\n                None, the description will be set to empty string\n            version: optional version for the documentation. If None,\n                the version will be set to empty string\n            contact: optional contact for the documentation. If None, the\n                contact will be set to placeholder values:\n                name='Author' url=HttpUrl('https://www.google.com', ) email='noreply@gmail.com'\n            kafka_brokers: dictionary describing kafka brokers used for setting\n                the bootstrap server when running the applicationa and for\n                generating documentation. 
Defaults to\n                    {\n                        \"localhost\": {\n                            \"url\": \"localhost\",\n                            \"description\": \"local kafka broker\",\n                            \"port\": \"9092\",\n                        }\n                    }\n            root_path: path to where documentation will be created\n            lifespan: asynccontextmanager that is used for setting lifespan hooks.\n                __aenter__ is called before app start and __aexit__ after app stop.\n                The lifespan is called whe application is started as async context\n                manager, e.g.:`async with kafka_app...`\n\n        \"\"\"\n\n        # this is needed for documentation generation\n        self._title = title if title is not None else \"\"\n        self._description = description if description is not None else \"\"\n        self._version = version if version is not None else \"\"\n        if contact is not None:\n            self._contact_info = _get_contact_info(**contact)\n        else:\n            self._contact_info = _get_contact_info()\n\n        self._kafka_service_info = KafkaServiceInfo(\n            title=self._title,\n            version=self._version,\n            description=self._description,\n            contact=self._contact_info,\n        )\n\n        if kafka_brokers is None:\n            kafka_brokers = {\n                \"localhost\": {\n                    \"url\": \"localhost\",\n                    \"description\": \"local kafka broker\",\n                    \"port\": \"9092\",\n                }\n            }\n\n        self._kafka_brokers = _get_kafka_brokers(kafka_brokers)\n\n        self._override_brokers: List[KafkaBrokers] = []\n\n        self._root_path = Path(\".\") if root_path is None else Path(root_path)\n        self._root_path.mkdir(exist_ok=True, parents=True)\n\n        self._asyncapi_path = self._root_path / \"asyncapi\"\n\n        # this is used as default 
parameters for creating AIOProducer and AIOConsumer objects\n        self._kafka_config = _get_kafka_config(**kwargs)\n\n        #\n        self._consumers_store: Dict[\n            str,\n            Tuple[\n                ConsumeCallable,\n                Callable[[bytes, Type[BaseModel]], Any],\n                Union[str, StreamExecutor, None],\n                Optional[KafkaBrokers],\n                Dict[str, Any],\n            ],\n        ] = {}\n\n        self._producers_store: Dict[  # type: ignore\n            str,\n            Tuple[\n                ProduceCallable,\n                fastkafka._aiokafka_imports.AIOKafkaProducer,\n                Optional[KafkaBrokers],\n                Dict[str, Any],\n            ],\n        ] = {}\n\n        self._producers_list: List[fastkafka._aiokafka_imports.AIOKafkaProducer] = []  # type: ignore\n\n        self.benchmark_results: Dict[str, Dict[str, Any]] = {}\n\n        # background tasks\n        self._scheduled_bg_tasks: List[Callable[..., Coroutine[Any, Any, Any]]] = []\n        self._bg_task_group_generator: Optional[anyio.abc.TaskGroup] = None\n        self._bg_tasks_group: Optional[anyio.abc.TaskGroup] = None\n\n        # todo: use this for errrors\n        self._on_error_topic: Optional[str] = None\n\n        self.lifespan = lifespan\n        self.lifespan_ctx: Optional[AsyncContextManager[None]] = None\n\n        self._is_started: bool = False\n        self._is_shutting_down: bool = False\n        self._kafka_consumer_tasks: List[asyncio.Task[Any]] = []\n        self._kafka_producer_tasks: List[asyncio.Task[Any]] = []\n        self._running_bg_tasks: List[asyncio.Task[Any]] = []\n        self.run = False\n\n        # testing functions\n        self.AppMocks = None\n        self.mocks = None\n        self.awaited_mocks = None\n\n    @property\n    def is_started(self) -> bool:\n        \"\"\"Property indicating whether the FastKafka object is started.\n\n        The is_started property indicates if the 
FastKafka object is currently\n        in a started state. This implies that all background tasks, producers,\n        and consumers have been initiated, and the object is successfully connected\n        to the Kafka broker.\n\n        Returns:\n            bool: True if the object is started, False otherwise.\n        \"\"\"\n        return self._is_started\n\n    def set_kafka_broker(self, kafka_broker_name: str) -> None:\n        \"\"\"\n        Sets the Kafka broker to start FastKafka with\n\n        Args:\n            kafka_broker_name: The name of the Kafka broker to start FastKafka\n\n        Raises:\n            ValueError: If the provided kafka_broker_name is not found in dictionary of kafka_brokers\n        \"\"\"\n\n        if kafka_broker_name not in self._kafka_brokers.brokers:\n            raise ValueError(\n                f\"Given kafka_broker_name '{kafka_broker_name}' is not found in kafka_brokers, available options are {self._kafka_brokers.brokers.keys()}\"\n            )\n\n        self._kafka_config[\"bootstrap_servers_id\"] = kafka_broker_name\n\n    async def __aenter__(self) -> \"FastKafka\":\n        if self.lifespan is not None:\n            self.lifespan_ctx = self.lifespan(self)\n            await self.lifespan_ctx.__aenter__()\n        await self._start()\n        return self\n\n    async def __aexit__(\n        self,\n        exc_type: Optional[Type[BaseException]],\n        exc: Optional[BaseException],\n        tb: Optional[types.TracebackType],\n    ) -> None:\n        await self._stop()\n        if self.lifespan_ctx is not None:\n            await self.lifespan_ctx.__aexit__(exc_type, exc, tb)\n\n    async def _start(self) -> None:\n        raise NotImplementedError\n\n    async def _stop(self) -> None:\n        raise NotImplementedError\n\n    def consumes(\n        self,\n        topic: Optional[str] = None,\n        decoder: str = \"json\",\n        *,\n        prefix: str = \"on_\",\n        brokers: Optional[KafkaBrokers] = 
None,\n        description: Optional[str] = None,\n        **kwargs: Dict[str, Any],\n    ) -> ConsumeCallable:\n        raise NotImplementedError\n\n    def produces(\n        self,\n        topic: Optional[str] = None,\n        encoder: str = \"json\",\n        *,\n        prefix: str = \"to_\",\n        brokers: Optional[KafkaBrokers] = None,\n        description: Optional[str] = None,\n        **kwargs: Dict[str, Any],\n    ) -> ProduceCallable:\n        raise NotImplementedError\n\n    def benchmark(\n        self,\n        interval: Union[int, timedelta] = 1,\n        *,\n        sliding_window_size: Optional[int] = None,\n    ) -> Callable[[F], F]:\n        raise NotImplementedError\n\n    def run_in_background(\n        self,\n    ) -> Callable[[], Any]:\n        raise NotImplementedError\n\n    def _populate_consumers(\n        self,\n        is_shutting_down_f: Callable[[], bool],\n    ) -> None:\n        raise NotImplementedError\n\n    def get_topics(self) -> Iterable[str]:\n        raise NotImplementedError\n\n    async def _populate_producers(self) -> None:\n        raise NotImplementedError\n\n    async def _populate_bg_tasks(self) -> None:\n        raise NotImplementedError\n\n    def create_docs(self) -> None:\n        raise NotImplementedError\n\n    def create_mocks(self) -> None:\n        raise NotImplementedError\n\n    async def _shutdown_consumers(self) -> None:\n        raise NotImplementedError\n\n    async def _shutdown_producers(self) -> None:\n        raise NotImplementedError\n\n    async def _shutdown_bg_tasks(self) -> None:\n        raise NotImplementedError\n\n# %% ../../nbs/015_FastKafka.ipynb 27\ndef _get_decoder_fn(decoder: str) -> Callable[[bytes, Type[BaseModel]], Any]:\n    \"\"\"\n    Imports and returns decoder function based on input\n    \"\"\"\n    if decoder == \"json\":\n        from fastkafka._components.encoder.json import json_decoder\n\n        return json_decoder\n    elif decoder == \"avro\":\n        try:\n        
    from fastkafka._components.encoder.avro import avro_decoder\n        except ModuleNotFoundError:\n            raise ModuleNotFoundError(\n                \"Unable to import avro packages. Please install FastKafka using the command 'fastkafka[avro]'\"\n            )\n        return avro_decoder\n    else:\n        raise ValueError(f\"Unknown decoder - {decoder}\")\n\n# %% ../../nbs/015_FastKafka.ipynb 29\ndef _prepare_and_check_brokers(\n    app: FastKafka, kafka_brokers: Optional[Union[Dict[str, Any], KafkaBrokers]]\n) -> Optional[KafkaBrokers]:\n    if kafka_brokers is not None:\n        prepared_brokers = _get_kafka_brokers(kafka_brokers)\n        if prepared_brokers.brokers.keys() != app._kafka_brokers.brokers.keys():\n            raise ValueError(\n                f\"To override application default brokers, you must define all of the broker options. Default defined: {set(app._kafka_brokers.brokers.keys())}, override defined: {set(prepared_brokers.brokers.keys())}\"\n            )\n        return prepared_brokers\n    return None\n\n# %% ../../nbs/015_FastKafka.ipynb 30\ndef _resolve_key(key: str, dictionary: Dict[str, Any]) -> str:\n    i = 0\n    resolved_key = f\"{key}_{i}\"\n    while resolved_key in dictionary:\n        i += 1\n        resolved_key = f\"{key}_{i}\"\n    return resolved_key\n\n# %% ../../nbs/015_FastKafka.ipynb 31\n@patch\n@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer)\ndef consumes(\n    self: FastKafka,\n    topic: Optional[str] = None,\n    decoder: Union[str, Callable[[bytes, Type[BaseModel]], Any]] = \"json\",\n    *,\n    executor: Union[str, StreamExecutor, None] = None,\n    brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = None,\n    prefix: str = \"on_\",\n    description: Optional[str] = None,\n    **kwargs: Dict[str, Any],\n) -> Callable[[ConsumeCallable], ConsumeCallable]:\n    \"\"\"Decorator registering the callback called when a message is received in a topic.\n\n    This function decorator is also 
responsible for registering topics for AsyncAPI specificiation and documentation.\n\n    Args:\n        topic: Kafka topic that the consumer will subscribe to and execute the\n            decorated function when it receives a message from the topic,\n            default: None. If the topic is not specified, topic name will be\n            inferred from the decorated function name by stripping the defined prefix\n        decoder: Decoder to use to decode messages consumed from the topic,\n                default: json - By default, it uses json decoder to decode\n                bytes to json string and then it creates instance of pydantic\n                BaseModel. It also accepts custom decoder function.\n        executor: Type of executor to choose for consuming tasks. Avaliable options\n                are \"SequentialExecutor\" and \"DynamicTaskExecutor\". The default option is\n                \"SequentialExecutor\" which will execute the consuming tasks sequentially.\n                If the consuming tasks have high latency it is recommended to use\n                \"DynamicTaskExecutor\" which will wrap the consuming functions into tasks\n                and run them in on asyncio loop in background. This comes with a cost of\n                increased overhead so use it only in cases when your consume functions have\n                high latency such as database queries or some other type of networking.\n        prefix: Prefix stripped from the decorated function to define a topic name\n                if the topic argument is not passed, default: \"on_\". 
If the decorated\n                function name is not prefixed with the defined prefix and topic argument\n                is not passed, then this method will throw ValueError\n        brokers: Optional argument specifying multiple broker clusters for consuming\n                messages from different Kafka clusters in FastKafka.\n        description: Optional description of the consuming function async docs.\n                If not provided, consuming function __doc__ attr will be used.\n\n    Returns:\n        A function returning the same function\n\n    Throws:\n        ValueError\n\n    \"\"\"\n\n    def _decorator(\n        on_topic: ConsumeCallable,\n        topic: Optional[str] = topic,\n        decoder: Union[str, Callable[[bytes, Type[BaseModel]], Any]] = decoder,\n        executor: Union[str, StreamExecutor, None] = executor,\n        brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = brokers,\n        description: Optional[str] = description,\n        kwargs: Dict[str, Any] = kwargs,\n    ) -> ConsumeCallable:\n        topic_resolved: str = (\n            _get_topic_name(topic_callable=on_topic, prefix=prefix)\n            if topic is None\n            else topic\n        )\n\n        decoder_fn = _get_decoder_fn(decoder) if isinstance(decoder, str) else decoder\n\n        prepared_broker = _prepare_and_check_brokers(self, brokers)\n        if prepared_broker is not None:\n            self._override_brokers.append(prepared_broker.brokers)  # type: ignore\n        else:\n            prepared_broker = self._kafka_brokers\n\n        if description is not None:\n            setattr(on_topic, \"description\", description)\n\n        self._consumers_store[_resolve_key(topic_resolved, self._consumers_store)] = (\n            on_topic,\n            decoder_fn,\n            executor,\n            prepared_broker,\n            kwargs,\n        )\n        setattr(self, on_topic.__name__, on_topic)\n        return on_topic\n\n    return _decorator\n\n# %% 
../../nbs/015_FastKafka.ipynb 34\ndef _get_encoder_fn(encoder: str) -> Callable[[BaseModel], bytes]:\n    \"\"\"\n    Imports and returns encoder function based on input\n    \"\"\"\n    if encoder == \"json\":\n        from fastkafka._components.encoder.json import json_encoder\n\n        return json_encoder\n    elif encoder == \"avro\":\n        try:\n            from fastkafka._components.encoder.avro import avro_encoder\n        except ModuleNotFoundError:\n            raise ModuleNotFoundError(\n                \"Unable to import avro packages. Please install FastKafka using the command 'fastkafka[avro]'\"\n            )\n        return avro_encoder\n    else:\n        raise ValueError(f\"Unknown encoder - {encoder}\")\n\n# %% ../../nbs/015_FastKafka.ipynb 36\n@patch\n@delegates(fastkafka._aiokafka_imports.AIOKafkaProducer)\ndef produces(\n    self: FastKafka,\n    topic: Optional[str] = None,\n    encoder: Union[str, Callable[[BaseModel], bytes]] = \"json\",\n    *,\n    prefix: str = \"to_\",\n    brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = None,\n    description: Optional[str] = None,\n    **kwargs: Dict[str, Any],\n) -> Callable[[ProduceCallable], ProduceCallable]:\n    \"\"\"Decorator registering the callback called when delivery report for a produced message is received\n\n    This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\n\n    Args:\n        topic: Kafka topic that the producer will send returned values from\n            the decorated function to, default: None- If the topic is not\n            specified, topic name will be inferred from the decorated function\n            name by stripping the defined prefix.\n        encoder: Encoder to use to encode messages before sending it to topic,\n                default: json - By default, it uses json encoder to convert\n                pydantic basemodel to json string and then encodes the string to bytes\n                
using 'utf-8' encoding. It also accepts custom encoder function.\n        prefix: Prefix stripped from the decorated function to define a topic\n            name if the topic argument is not passed, default: \"to_\". If the\n            decorated function name is not prefixed with the defined prefix\n            and topic argument is not passed, then this method will throw ValueError\n        brokers: Optional argument specifying multiple broker clusters for consuming\n            messages from different Kafka clusters in FastKafka.\n        description: Optional description of the producing function async docs.\n                If not provided, producing function __doc__ attr will be used.\n\n    Returns:\n        A function returning the same function\n\n    Raises:\n        ValueError: when needed\n    \"\"\"\n\n    def _decorator(\n        to_topic: ProduceCallable,\n        topic: Optional[str] = topic,\n        brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = brokers,\n        description: Optional[str] = description,\n        kwargs: Dict[str, Any] = kwargs,\n    ) -> ProduceCallable:\n        topic_resolved: str = (\n            _get_topic_name(topic_callable=to_topic, prefix=prefix)\n            if topic is None\n            else topic\n        )\n\n        topic_key = _resolve_key(topic_resolved, self._producers_store)\n\n        prepared_broker = _prepare_and_check_brokers(self, brokers)\n        if prepared_broker is not None:\n            self._override_brokers.append(prepared_broker.brokers)  # type: ignore\n        else:\n            prepared_broker = self._kafka_brokers\n\n        if description is not None:\n            setattr(to_topic, \"description\", description)\n\n        self._producers_store[topic_key] = (\n            to_topic,\n            None,\n            prepared_broker,\n            kwargs,\n        )\n        encoder_fn = _get_encoder_fn(encoder) if isinstance(encoder, str) else encoder\n        decorated = 
producer_decorator(\n            self._producers_store,\n            to_topic,\n            topic_key,\n            encoder_fn=encoder_fn,\n        )\n        setattr(self, to_topic.__name__, decorated)\n        return decorated\n\n    return _decorator\n\n# %% ../../nbs/015_FastKafka.ipynb 39\n@patch\ndef get_topics(self: FastKafka) -> Iterable[str]:\n    \"\"\"\n    Get all topics for both producing and consuming.\n\n    Returns:\n        A set of topics for both producing and consuming.\n    \"\"\"\n    produce_topics = set([remove_suffix(topic) for topic in self._producers_store])\n    consume_topics = set([remove_suffix(topic) for topic in self._consumers_store])\n    return consume_topics.union(produce_topics)\n\n# %% ../../nbs/015_FastKafka.ipynb 41\n@patch\ndef run_in_background(\n    self: FastKafka,\n) -> Callable[\n    [Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]\n]:\n    \"\"\"\n    Decorator to schedule a task to be run in the background.\n\n    This decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\n\n    Returns:\n        Callable[None, None]: A decorator function that takes a background task as an input and stores it to be run in the backround.\n    \"\"\"\n\n    def _decorator(\n        bg_task: Callable[..., Coroutine[Any, Any, Any]]\n    ) -> Callable[..., Coroutine[Any, Any, Any]]:\n        \"\"\"\n        Store the background task.\n\n        Args:\n            bg_task (Callable[[], None]): The background task to be run asynchronously.\n\n        Returns:\n            Callable[[], None]: Original background task.\n        \"\"\"\n        logger.info(\n            f\"run_in_background() : Adding function '{bg_task.__name__}' as background task\"\n        )\n        self._scheduled_bg_tasks.append(bg_task)\n\n        return bg_task\n\n    return _decorator\n\n# %% ../../nbs/015_FastKafka.ipynb 45\n@patch\ndef _populate_consumers(\n    self: 
FastKafka,\n    is_shutting_down_f: Callable[[], bool],\n) -> None:\n    default_config: Dict[str, Any] = filter_using_signature(\n        fastkafka._aiokafka_imports.AIOKafkaConsumer, **self._kafka_config\n    )\n\n    bootstrap_server = self._kafka_config[\"bootstrap_servers_id\"]\n\n    self._kafka_consumer_tasks = [\n        asyncio.create_task(\n            aiokafka_consumer_loop(\n                topic=\"_\".join(topic.split(\"_\")[:-1]),\n                decoder_fn=decoder_fn,\n                callback=consumer,\n                msg_type=signature(consumer).parameters[\"msg\"].annotation,\n                is_shutting_down_f=is_shutting_down_f,\n                executor=executor,\n                **{\n                    **default_config,\n                    **override_config,\n                    **{\n                        \"bootstrap_servers\": _get_broker_addr_list(\n                            kafka_brokers.brokers[bootstrap_server]\n                            if kafka_brokers is not None\n                            else self._kafka_brokers.brokers[bootstrap_server]\n                        )\n                    },\n                },\n            )\n        )\n        for topic, (\n            consumer,\n            decoder_fn,\n            executor,\n            kafka_brokers,\n            override_config,\n        ) in self._consumers_store.items()\n    ]\n\n\n@patch\nasync def _shutdown_consumers(\n    self: FastKafka,\n) -> None:\n    if self._kafka_consumer_tasks:\n        await asyncio.wait(self._kafka_consumer_tasks)\n\n# %% ../../nbs/015_FastKafka.ipynb 47\n# TODO: Add passing of vars\nasync def _create_producer(  # type: ignore\n    *,\n    callback: ProduceCallable,\n    default_config: Dict[str, Any],\n    override_config: Dict[str, Any],\n    bootstrap_servers: Union[str, List[str]],\n    producers_list: List[fastkafka._aiokafka_imports.AIOKafkaProducer],\n) -> fastkafka._aiokafka_imports.AIOKafkaProducer:\n    \"\"\"Creates a 
producer\n\n    Args:\n        callback: A callback function that is called when the producer is ready.\n        producer: An existing producer to use.\n        default_config: A dictionary of default configuration values.\n        override_config: A dictionary of configuration values to override.\n        bootstrap_servers: Bootstrap servers to connect the producer to.\n        producers_list: A list of producers to add the new producer to.\n\n    Returns:\n        A producer.\n    \"\"\"\n\n    config = {\n        **filter_using_signature(\n            fastkafka._aiokafka_imports.AIOKafkaProducer, **default_config\n        ),\n        **filter_using_signature(\n            fastkafka._aiokafka_imports.AIOKafkaProducer, **override_config\n        ),\n        **{\"bootstrap_servers\": bootstrap_servers},\n    }\n\n    producer = fastkafka._aiokafka_imports.AIOKafkaProducer(**config)\n    logger.info(\n        f\"_create_producer() : created producer using the config: '{sanitize_kafka_config(**config)}'\"\n    )\n\n    await producer.start()\n\n    producers_list.append(producer)\n\n    return producer\n\n\n@patch\nasync def _populate_producers(self: FastKafka) -> None:\n    \"\"\"Populates the producers for the FastKafka instance.\n\n    Args:\n        self: The FastKafka instance.\n\n    Returns:\n        None.\n\n    Raises:\n        None.\n    \"\"\"\n    default_config: Dict[str, Any] = self._kafka_config\n    bootstrap_server = default_config[\"bootstrap_servers_id\"]\n\n    self._producers_list = []\n    self._producers_store.update(\n        {\n            topic: (\n                callback,\n                await _create_producer(\n                    callback=callback,\n                    default_config=default_config,\n                    override_config=override_config,\n                    bootstrap_servers=_get_broker_addr_list(\n                        kafka_brokers.brokers[bootstrap_server]\n                        if kafka_brokers is not None\n      
                  else self._kafka_brokers.brokers[bootstrap_server]\n                    ),\n                    producers_list=self._producers_list,\n                ),\n                kafka_brokers,\n                override_config,\n            )\n            for topic, (\n                callback,\n                _,\n                kafka_brokers,\n                override_config,\n            ) in self._producers_store.items()\n        }\n    )\n\n\n@patch\nasync def _shutdown_producers(self: FastKafka) -> None:\n    [await producer.stop() for producer in self._producers_list[::-1]]\n    # Remove references to stale producers\n    self._producers_list = []\n    self._producers_store.update(\n        {\n            topic: (\n                callback,\n                None,\n                kafka_brokers,\n                override_config,\n            )\n            for topic, (\n                callback,\n                _,\n                kafka_brokers,\n                override_config,\n            ) in self._producers_store.items()\n        }\n    )\n\n# %% ../../nbs/015_FastKafka.ipynb 49\n@patch\nasync def _populate_bg_tasks(\n    self: FastKafka,\n) -> None:\n    def _start_bg_task(task: Callable[..., Coroutine[Any, Any, Any]]) -> asyncio.Task:\n        logger.info(\n            f\"_populate_bg_tasks() : Starting background task '{task.__name__}'\"\n        )\n        return asyncio.create_task(task(), name=task.__name__)\n\n    self._running_bg_tasks = [_start_bg_task(task) for task in self._scheduled_bg_tasks]\n\n\n@patch\nasync def _shutdown_bg_tasks(\n    self: FastKafka,\n) -> None:\n    for task in self._running_bg_tasks:\n        logger.info(\n            f\"_shutdown_bg_tasks() : Cancelling background task '{task.get_name()}'\"\n        )\n        task.cancel()\n\n    for task in self._running_bg_tasks:\n        logger.info(\n            f\"_shutdown_bg_tasks() : Waiting for background task '{task.get_name()}' to finish\"\n        )\n        
try:\n            await task\n        except asyncio.CancelledError:\n            pass\n        logger.info(\n            f\"_shutdown_bg_tasks() : Execution finished for background task '{task.get_name()}'\"\n        )\n\n# %% ../../nbs/015_FastKafka.ipynb 51\n@patch\nasync def _start(self: FastKafka) -> None:\n    def is_shutting_down_f(self: FastKafka = self) -> bool:\n        return self._is_shutting_down\n\n    #     self.create_docs()\n    await self._populate_producers()\n    self._populate_consumers(is_shutting_down_f)\n    await self._populate_bg_tasks()\n\n    self._is_started = True\n\n\n@patch\nasync def _stop(self: FastKafka) -> None:\n    self._is_shutting_down = True\n\n    await self._shutdown_bg_tasks()\n    await self._shutdown_consumers()\n    await self._shutdown_producers()\n\n    self._is_shutting_down = False\n    self._is_started = False\n\n# %% ../../nbs/015_FastKafka.ipynb 57\n@patch\ndef create_docs(self: FastKafka) -> None:\n    \"\"\"\n    Create the asyncapi documentation based on the configured consumers and producers.\n\n    This function exports the asyncapi specification based on the configured consumers\n    and producers in the FastKafka instance. 
It generates the asyncapi documentation by\n    extracting the topics and callbacks from the consumers and producers.\n\n    Note:\n        The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\n        attribute of the FastKafka instance.\n    \"\"\"\n    (self._asyncapi_path / \"docs\").mkdir(exist_ok=True, parents=True)\n    (self._asyncapi_path / \"spec\").mkdir(exist_ok=True, parents=True)\n    export_async_spec(\n        consumers={\n            remove_suffix(topic) if topic.endswith(\"_0\") else topic: callback\n            for topic, (callback, _, _, _, _) in self._consumers_store.items()\n        },\n        producers={\n            remove_suffix(topic) if topic.endswith(\"_0\") else topic: callback\n            for topic, (callback, _, _, _) in self._producers_store.items()\n        },\n        kafka_brokers=self._kafka_brokers,\n        kafka_service_info=self._kafka_service_info,\n        asyncapi_path=self._asyncapi_path,\n    )\n\n# %% ../../nbs/015_FastKafka.ipynb 61\nclass AwaitedMock:\n    \"\"\"\n    Class representing an awaited mock object.\n\n    Args:\n        o: The original object to be wrapped.\n    \"\"\"\n\n    @staticmethod\n    def _await_for(f: Callable[..., Any]) -> Callable[..., Any]:\n        @delegates(f)\n        async def inner(\n            *args: Any, f: Callable[..., Any] = f, timeout: int = 60, **kwargs: Any\n        ) -> Any:\n            \"\"\"\n            Decorator to await the execution of a function.\n\n            Args:\n                f: The function to be wrapped.\n\n            Returns:\n                The wrapped function.\n            \"\"\"\n            if inspect.iscoroutinefunction(f):\n                return await asyncio.wait_for(f(*args, **kwargs), timeout=timeout)\n            else:\n                t0 = datetime.now()\n                e: Optional[Exception] = None\n                while True:\n                    try:\n                        return f(*args, **kwargs)\n    
                except Exception as _e:\n                        await asyncio.sleep(1)\n                        e = _e\n\n                    if datetime.now() - t0 > timedelta(seconds=timeout):\n                        break\n\n                raise e\n\n        return inner\n\n    def __init__(self, o: Any):\n        \"\"\"\n        Initializes an instance of AwaitedMock.\n\n        Args:\n            o: The original object to be wrapped.\n        \"\"\"\n        self._o = o\n\n        for name in o.__dir__():\n            if not name.startswith(\"_\"):\n                f = getattr(o, name)\n                if inspect.ismethod(f):\n                    setattr(self, name, self._await_for(f))\n\n# %% ../../nbs/015_FastKafka.ipynb 62\n@patch\ndef create_mocks(self: FastKafka) -> None:\n    \"\"\"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\"\"\"\n    app_methods = [f for f, _, _, _, _ in self._consumers_store.values()] + [\n        f for f, _, _, _ in self._producers_store.values()\n    ]\n    self.AppMocks = namedtuple(  # type: ignore\n        f\"{self.__class__.__name__}Mocks\", [f.__name__ for f in app_methods]\n    )\n\n    self.mocks = self.AppMocks(  # type: ignore\n        **{\n            f.__name__: AsyncMock() if inspect.iscoroutinefunction(f) else MagicMock()\n            for f in app_methods\n        }\n    )\n\n    self.awaited_mocks = self.AppMocks(  # type: ignore\n        **{name: AwaitedMock(mock) for name, mock in self.mocks._asdict().items()}\n    )\n\n    def add_mock(\n        f: Callable[..., Any], mock: Union[AsyncMock, MagicMock]\n    ) -> Callable[..., Any]:\n        \"\"\"Add call to mock when calling function f\"\"\"\n\n        @functools.wraps(f)\n        async def async_inner(\n            *args: Any, f: Callable[..., Any] = f, mock: AsyncMock = mock, **kwargs: Any\n        ) -> Any:\n            await mock(*deepcopy(args), **kwargs)\n            return await f(*args, 
**kwargs)\n\n        @functools.wraps(f)\n        def sync_inner(\n            *args: Any, f: Callable[..., Any] = f, mock: MagicMock = mock, **kwargs: Any\n        ) -> Any:\n            mock(*deepcopy(args), **kwargs)\n            return f(*args, **kwargs)\n\n        if inspect.iscoroutinefunction(f):\n            return async_inner\n        else:\n            return sync_inner\n\n    self._consumers_store.update(\n        {\n            name: (\n                add_mock(f, getattr(self.mocks, f.__name__)),\n                decoder_fn,\n                executor,\n                kafka_brokers,\n                kwargs,\n            )\n            for name, (\n                f,\n                decoder_fn,\n                executor,\n                kafka_brokers,\n                kwargs,\n            ) in self._consumers_store.items()\n        }\n    )\n\n    self._producers_store.update(\n        {\n            name: (\n                add_mock(f, getattr(self.mocks, f.__name__)),\n                producer,\n                kafka_brokers,\n                kwargs,\n            )\n            for name, (\n                f,\n                producer,\n                kafka_brokers,\n                kwargs,\n            ) in self._producers_store.items()\n        }\n    )\n\n# %% ../../nbs/015_FastKafka.ipynb 67\n@patch\ndef benchmark(\n    self: FastKafka,\n    interval: Union[int, timedelta] = 1,\n    *,\n    sliding_window_size: Optional[int] = None,\n) -> Callable[[Callable[[I], Optional[O]]], Callable[[I], Optional[O]]]:\n    \"\"\"Decorator to benchmark produces/consumes functions\n\n    Args:\n        interval: Period to use to calculate throughput. If value is of type int,\n            then it will be used as seconds. If value is of type timedelta,\n            then it will be used as it is. default: 1 - one second\n        sliding_window_size: The size of the sliding window to use to calculate\n            average throughput. 
default: None - By default average throughput is\n            not calculated\n    \"\"\"\n\n    def _decorator(func: Callable[[I], Optional[O]]) -> Callable[[I], Optional[O]]:\n        func_name = f\"{func.__module__}.{func.__qualname__}\"\n\n        @wraps(func)\n        def wrapper(\n            *args: I,\n            **kwargs: I,\n        ) -> Optional[O]:\n            _benchmark(\n                interval=interval,\n                sliding_window_size=sliding_window_size,\n                func_name=func_name,\n                benchmark_results=self.benchmark_results,\n            )\n            return func(*args, **kwargs)\n\n        @wraps(func)\n        async def async_wrapper(\n            *args: I,\n            **kwargs: I,\n        ) -> Optional[O]:\n            _benchmark(\n                interval=interval,\n                sliding_window_size=sliding_window_size,\n                func_name=func_name,\n                benchmark_results=self.benchmark_results,\n            )\n            return await func(*args, **kwargs)  # type: ignore\n\n        if inspect.iscoroutinefunction(func):\n            return async_wrapper  # type: ignore\n        else:\n            return wrapper\n\n    return _decorator\n\n# %% ../../nbs/015_FastKafka.ipynb 69\n@patch\ndef fastapi_lifespan(\n    self: FastKafka, kafka_broker_name: str\n) -> Callable[[\"FastAPI\"], AsyncIterator[None]]:\n    \"\"\"\n    Method for managing the lifespan of a FastAPI application with a specific Kafka broker.\n\n    Args:\n        kafka_broker_name: The name of the Kafka broker to start FastKafka\n\n    Returns:\n        Lifespan function to use for initializing FastAPI\n    \"\"\"\n\n    @asynccontextmanager\n    async def lifespan(fastapi_app: \"FastAPI\") -> AsyncIterator[None]:\n        self.set_kafka_broker(kafka_broker_name=kafka_broker_name)\n        async with self:\n            yield\n\n    return lifespan  # type: ignore\n"
  },
  {
    "path": "fastkafka/_application/tester.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/016_Tester.ipynb.\n\n# %% auto 0\n__all__ = ['Tester', 'mirror_producer', 'mirror_consumer', 'AmbiguousWarning', 'set_sugar']\n\n# %% ../../nbs/016_Tester.ipynb 1\nimport asyncio\nimport collections\nimport inspect\nfrom unittest.mock import AsyncMock, MagicMock\nimport json\nfrom contextlib import asynccontextmanager\nfrom itertools import groupby\nfrom typing import *\nfrom types import ModuleType\n\nfrom pydantic import BaseModel\n\nfrom .. import KafkaEvent\nfrom .app import FastKafka, AwaitedMock, _get_kafka_brokers\nfrom .._components.asyncapi import KafkaBroker, KafkaBrokers\nfrom .._components.helpers import unwrap_list_type\nfrom .._components.meta import delegates, export, patch\nfrom .._components.producer_decorator import unwrap_from_kafka_event\nfrom .._components.aiokafka_consumer_loop import ConsumeCallable\nfrom .._testing.apache_kafka_broker import ApacheKafkaBroker\nfrom .._testing.in_memory_broker import InMemoryBroker\nfrom .._testing.local_redpanda_broker import LocalRedpandaBroker\nfrom .._components.helpers import remove_suffix\n\n# %% ../../nbs/016_Tester.ipynb 7\ndef _get_broker_spec(bootstrap_server: str) -> KafkaBroker:\n    \"\"\"\n    Helper function to get the broker specification from the bootstrap server URL.\n\n    Args:\n        bootstrap_server: The bootstrap server URL in the format \"<host>:<port>\".\n\n    Returns:\n        A KafkaBroker object representing the broker specification.\n    \"\"\"\n    url = bootstrap_server.split(\":\")[0]\n    port = bootstrap_server.split(\":\")[1]\n    return KafkaBroker(url=url, port=port, description=\"\", protocol=\"\")\n\n# %% ../../nbs/016_Tester.ipynb 9\n@export(\"fastkafka.testing\")\nclass Tester(FastKafka):\n    __test__ = False\n\n    def __init__(\n        self,\n        app: Union[FastKafka, List[FastKafka]],\n        *,\n        use_in_memory_broker: bool = True,\n    ):\n        \"\"\"Mirror-like object for 
testing a FastKafka application\n\n        Can be used as context manager\n\n        Args:\n            app: The FastKafka application to be tested.\n            use_in_memory_broker: Whether to use an in-memory broker for testing or not.\n        \"\"\"\n        self.apps = app if isinstance(app, list) else [app]\n\n        for app in self.apps:\n            app.create_mocks()\n\n        super().__init__()\n        self.mirrors: Dict[Any, Any] = {}\n        self._kafka_brokers = self.apps[0]._kafka_brokers\n        self._kafka_config[\"bootstrap_servers_id\"] = self.apps[0]._kafka_config[\n            \"bootstrap_servers_id\"\n        ]\n        self._create_mirrors()\n        self.use_in_memory_broker = use_in_memory_broker\n\n    async def _start_tester(self) -> None:\n        \"\"\"Starts the Tester\"\"\"\n        for app in self.apps:\n            await app.__aenter__()\n        self.create_mocks()\n        self._arrange_mirrors()\n        await super().__aenter__()\n        await asyncio.sleep(3)\n\n    async def _stop_tester(self) -> None:\n        \"\"\"Shuts down the Tester\"\"\"\n        await super().__aexit__(None, None, None)\n        for app in self.apps[::-1]:\n            await app.__aexit__(None, None, None)\n\n    def _create_mirrors(self) -> None:\n        pass\n\n    def _arrange_mirrors(self) -> None:\n        pass\n\n    def _set_arguments_and_return_old(\n        self, bootstrap_servers_id: Optional[str], use_in_memory_broker: bool\n    ) -> Dict[Any, Any]:\n        initial_arguments: Dict[Any, Any] = dict()\n        initial_arguments[\"use_in_memory_broker\"] = self.use_in_memory_broker\n        self.use_in_memory_broker = use_in_memory_broker\n\n        initial_arguments[\"bootstrap_servers_id\"] = self._kafka_config[\n            \"bootstrap_servers_id\"\n        ]\n        if bootstrap_servers_id is None:\n            bootstrap_servers_id = self._kafka_config[\"bootstrap_servers_id\"]\n        else:\n            
self._kafka_config[\"bootstrap_servers_id\"] = bootstrap_servers_id\n\n        for app in self.apps:\n            initial_arguments[app] = app._kafka_config[\"bootstrap_servers_id\"]\n            app._kafka_config[\"bootstrap_servers_id\"] = bootstrap_servers_id\n\n        return initial_arguments\n\n    def _restore_initial_arguments(self, initial_arguments: Dict[Any, Any]) -> None:\n        self.use_in_memory_broker = initial_arguments[\"use_in_memory_broker\"]\n        self._kafka_config[\"bootstrap_servers_id\"] = initial_arguments[\n            \"bootstrap_servers_id\"\n        ]\n\n        for app in self.apps:\n            app._kafka_config[\"bootstrap_servers_id\"] = initial_arguments[app]\n\n    @asynccontextmanager\n    async def using_external_broker(\n        self,\n        bootstrap_servers_id: Optional[str] = None,\n    ) -> AsyncGenerator[\"Tester\", None]:\n        \"\"\"Tester context manager for using external broker\n\n        Args:\n            bootstrap_servers_id: The bootstrap server of aplications.\n\n        Returns:\n            self or None\n        \"\"\"\n        initial_arguments = self._set_arguments_and_return_old(\n            bootstrap_servers_id, use_in_memory_broker=False\n        )\n\n        async with self._create_ctx() as ctx:\n            try:\n                yield self\n            finally:\n                self._restore_initial_arguments(initial_arguments)\n\n    @asynccontextmanager\n    async def using_inmemory_broker(\n        self,\n        bootstrap_servers_id: Optional[str] = None,\n    ) -> AsyncGenerator[\"Tester\", None]:\n        \"\"\"Tester context manager for using in-memory broker\n\n        Args:\n            bootstrap_servers_id: The bootstrap server of aplications.\n\n        Returns:\n            self or None\n        \"\"\"\n        initial_arguments = self._set_arguments_and_return_old(\n            bootstrap_servers_id, use_in_memory_broker=True\n        )\n\n        async with self._create_ctx() as 
ctx:\n            try:\n                yield self\n            finally:\n                self._restore_initial_arguments(initial_arguments)\n\n    @asynccontextmanager\n    async def _create_ctx(self) -> AsyncGenerator[\"Tester\", None]:\n        if self.use_in_memory_broker == True:\n            with InMemoryBroker():  # type: ignore\n                await self._start_tester()\n                try:\n                    yield self\n                finally:\n                    await self._stop_tester()\n        else:\n            await self._start_tester()\n            try:\n                yield self\n            finally:\n                await self._stop_tester()\n\n    async def __aenter__(self) -> \"Tester\":\n        self._ctx = self._create_ctx()\n        return await self._ctx.__aenter__()\n\n    async def __aexit__(self, *args: Any) -> None:\n        await self._ctx.__aexit__(*args)\n\n# %% ../../nbs/016_Tester.ipynb 16\ndef mirror_producer(\n    topic: str, producer_f: Callable[..., Any], brokers: str, app: FastKafka\n) -> Callable[..., Any]:\n    \"\"\"\n    Decorator to create a mirrored producer function.\n\n    Args:\n        topic: The topic to produce to.\n        producer_f: The original producer function.\n        brokers: The brokers configuration.\n        app: The FastKafka application.\n\n    Returns:\n        The mirrored producer function.\n    \"\"\"\n    msg_type = inspect.signature(producer_f).return_annotation\n\n    msg_type_unwrapped = unwrap_list_type(unwrap_from_kafka_event(msg_type))\n\n    async def skeleton_func(msg: BaseModel) -> None:\n        pass\n\n    mirror_func = skeleton_func\n    sig = inspect.signature(skeleton_func)\n\n    # adjust name, take into consideration the origin app and brokers\n    # configuration so that we can differentiate those two\n    mirror_func.__name__ = f\"mirror_{id(app)}_on_{remove_suffix(topic).replace('.', '_').replace('-', '_')}_{abs(hash(brokers))}\"\n\n    # adjust arg and return val\n    
sig = sig.replace(\n        parameters=[\n            inspect.Parameter(\n                name=\"msg\",\n                annotation=msg_type_unwrapped,\n                kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,\n            )\n        ]\n    )\n\n    mirror_func.__signature__ = sig  # type: ignore\n\n    return mirror_func\n\n# %% ../../nbs/016_Tester.ipynb 19\ndef mirror_consumer(\n    topic: str, consumer_f: Callable[..., Any], brokers: str, app: FastKafka\n) -> Callable[[BaseModel], Coroutine[Any, Any, BaseModel]]:\n    \"\"\"\n    Decorator to create a mirrored consumer function.\n\n    Args:\n        topic: The topic to consume from.\n        consumer_f: The original consumer function.\n        brokers: The brokers configuration.\n        app: The FastKafka application.\n\n    Returns:\n        The mirrored consumer function.\n    \"\"\"\n    msg_type = inspect.signature(consumer_f).parameters[\"msg\"]\n\n    msg_type_unwrapped = unwrap_list_type(msg_type)\n\n    async def skeleton_func(msg: BaseModel) -> BaseModel:\n        return msg\n\n    mirror_func = skeleton_func\n    sig = inspect.signature(skeleton_func)\n\n    # adjust name, take into consideration the origin app and brokers\n    # configuration so that we can differentiate those two\n    mirror_func.__name__ = f\"mirror_{id(app)}_to_{remove_suffix(topic).replace('.', '_').replace('-', '_')}_{abs(hash(brokers))}\"\n\n    # adjust arg and return val\n    sig = sig.replace(\n        parameters=[msg_type], return_annotation=msg_type_unwrapped.annotation\n    )\n\n    mirror_func.__signature__ = sig  # type: ignore\n    return mirror_func\n\n# %% ../../nbs/016_Tester.ipynb 21\n@patch\ndef _create_mirrors(self: Tester) -> None:\n    \"\"\"\n    Creates mirror functions for producers and consumers.\n\n    Iterates over the FastKafka application and its producers and consumers. For each consumer, it creates a mirror\n    consumer function using the `mirror_consumer` decorator. 
For each producer, it creates a mirror producer function\n    using the `mirror_producer` decorator. The mirror functions are stored in the `self.mirrors` dictionary and also\n    set as attributes on the Tester instance.\n\n    Returns:\n        None\n    \"\"\"\n    for app in self.apps:\n        for topic, (consumer_f, _, _, brokers, _) in app._consumers_store.items():\n            mirror_f = mirror_consumer(\n                topic,\n                consumer_f,\n                brokers.model_dump_json()\n                if brokers is not None\n                else app._kafka_brokers.model_dump_json(),\n                app,\n            )\n            mirror_f = self.produces(  # type: ignore\n                topic=remove_suffix(topic),\n                brokers=brokers,\n            )(mirror_f)\n            self.mirrors[consumer_f] = mirror_f\n            setattr(self, mirror_f.__name__, mirror_f)\n        for topic, (producer_f, _, brokers, _) in app._producers_store.items():\n            mirror_f = mirror_producer(\n                topic,\n                producer_f,\n                brokers.model_dump_json()\n                if brokers is not None\n                else app._kafka_brokers.model_dump_json(),\n                app,\n            )\n            mirror_f = self.consumes(\n                topic=remove_suffix(topic),\n                brokers=brokers,\n            )(\n                mirror_f  # type: ignore\n            )\n            self.mirrors[producer_f] = mirror_f\n            setattr(self, mirror_f.__name__, mirror_f)\n\n# %% ../../nbs/016_Tester.ipynb 25\nclass AmbiguousWarning:\n    \"\"\"\n    Warning class used for ambiguous topics.\n\n    Args:\n        topic: The ambiguous topic.\n        functions: List of function names associated with the ambiguous topic.\n    \"\"\"\n\n    def __init__(self, topic: str, functions: List[str]):\n        self.topic = topic\n        self.functions = functions\n\n    def __getattribute__(self, attr: str) -> 
Any:\n        raise RuntimeError(\n            f\"Ambiguous topic: {super().__getattribute__('topic')}, for functions: {super().__getattribute__('functions')}\\nUse Tester.mirrors[app.function] to resolve ambiguity\"\n        )\n\n    def __call__(self, *args: Any, **kwargs: Any) -> Any:\n        raise RuntimeError(\n            f\"Ambiguous topic: {self.topic}, for functions: {self.functions}\\nUse Tester.mirrors[app.function] to resolve ambiguity\"\n        )\n\n# %% ../../nbs/016_Tester.ipynb 27\ndef set_sugar(\n    *,\n    tester: Tester,\n    prefix: str,\n    topic_brokers: Dict[str, Tuple[List[str], List[str]]],\n    topic: str,\n    brokers: str,\n    origin_function_name: str,\n    function: Callable[..., Union[Any, Awaitable[Any]]],\n) -> None:\n    \"\"\"\n    Sets the sugar function for a topic.\n\n    Args:\n        tester: The Tester instance.\n        prefix: The prefix to use for the sugar function (e.g., \"to_\" or \"on_\").\n        topic_brokers: Dictionary to store the brokers and functions associated with each topic.\n        topic: The topic name.\n        brokers: The brokers configuration.\n        origin_function_name: The name of the original function.\n        function: The mirror function to be set as the sugar function.\n\n    Returns:\n        None\n    \"\"\"\n    brokers_for_topic, functions_for_topic = topic_brokers.get(topic, ([], []))\n    if brokers not in brokers_for_topic:\n        brokers_for_topic.append(brokers)\n        functions_for_topic.append(origin_function_name)\n        topic_brokers[topic] = (brokers_for_topic, functions_for_topic)\n    if len(brokers_for_topic) == 1:\n        setattr(tester, f\"{prefix}{topic}\", function)\n    else:\n        setattr(\n            tester, f\"{prefix}{topic}\", AmbiguousWarning(topic, functions_for_topic)\n        )\n\n# %% ../../nbs/016_Tester.ipynb 28\n@patch\ndef _arrange_mirrors(self: Tester) -> None:\n    \"\"\"\n    Arranges the mirror functions.\n\n    Iterates over the 
FastKafka application and its producers and consumers. For each consumer, it retrieves the mirror\n    function from the `self.mirrors` dictionary and sets it as an attribute on the Tester instance. It also sets the\n    sugar function using the `set_sugar` function. For each producer, it retrieves the mirror function and sets it as\n    an attribute on the Tester instance. It also sets the sugar function for the awaited mocks. Finally, it creates the\n    `mocks` and `awaited_mocks` namedtuples and sets them as attributes on the Tester instance.\n\n    Returns:\n        None\n    \"\"\"\n    topic_brokers: Dict[str, Tuple[List[str], List[str]]] = {}\n    mocks = {}\n    awaited_mocks = {}\n    for app in self.apps:\n        for topic, (consumer_f, _, _, brokers, _) in app._consumers_store.items():\n            mirror_f = self.mirrors[consumer_f]\n            self.mirrors[getattr(app, consumer_f.__name__)] = mirror_f\n            set_sugar(\n                tester=self,\n                prefix=\"to_\",\n                topic_brokers=topic_brokers,\n                topic=remove_suffix(topic).replace(\".\", \"_\").replace(\"-\", \"_\"),\n                brokers=brokers.model_dump_json()\n                if brokers is not None\n                else app._kafka_brokers.model_dump_json(),\n                origin_function_name=consumer_f.__name__,\n                function=mirror_f,\n            )\n\n            mocks[\n                f\"to_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\"\n            ] = getattr(self.mocks, mirror_f.__name__)\n            awaited_mocks[\n                f\"to_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\"\n            ] = getattr(self.awaited_mocks, mirror_f.__name__)\n\n        for topic, (producer_f, _, brokers, _) in app._producers_store.items():\n            mirror_f = self.mirrors[producer_f]\n            self.mirrors[getattr(app, producer_f.__name__)] = getattr(\n                self.awaited_mocks, 
mirror_f.__name__\n            )\n            set_sugar(\n                tester=self,\n                prefix=\"on_\",\n                topic_brokers=topic_brokers,\n                topic=remove_suffix(topic).replace(\".\", \"_\").replace(\"-\", \"_\"),\n                brokers=brokers.model_dump_json()\n                if brokers is not None\n                else app._kafka_brokers.model_dump_json(),\n                origin_function_name=producer_f.__name__,\n                function=getattr(self.awaited_mocks, mirror_f.__name__),\n            )\n            mocks[\n                f\"on_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\"\n            ] = getattr(self.mocks, mirror_f.__name__)\n            awaited_mocks[\n                f\"on_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\"\n            ] = getattr(self.awaited_mocks, mirror_f.__name__)\n\n    AppMocks = collections.namedtuple(  # type: ignore\n        f\"{self.__class__.__name__}Mocks\", [f_name for f_name in mocks]\n    )\n    setattr(self, \"mocks\", AppMocks(**mocks))\n    setattr(self, \"awaited_mocks\", AppMocks(**awaited_mocks))\n"
  },
  {
    "path": "fastkafka/_cli.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/023_CLI.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'run']\n\n# %% ../nbs/023_CLI.ipynb 1\nimport asyncio\nimport multiprocessing\nfrom typing import *\n\nimport typer\n\nfrom . import _cli_docs, _cli_testing\nfrom ._components.logger import get_logger\nfrom ._server import run_fastkafka_server\n\n# %% ../nbs/023_CLI.ipynb 5\nlogger = get_logger(__name__, level=20)\n\n# %% ../nbs/023_CLI.ipynb 8\n_app = typer.Typer(help=\"\")\n\n# %% ../nbs/023_CLI.ipynb 9\n@_app.command(\n    help=\"Runs Fast Kafka API application\",\n)\ndef run(\n    num_workers: int = typer.Option(\n        multiprocessing.cpu_count(),\n        help=\"Number of FastKafka instances to run, defaults to number of CPU cores.\",\n    ),\n    app: str = typer.Argument(\n        ...,\n        help=\"input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\",\n    ),\n    kafka_broker: str = typer.Option(\n        \"localhost\",\n        help=\"kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class.\",\n    ),\n) -> None:\n    \"\"\"\n    Runs FastKafka application.\n\n    Args:\n        num_workers (int): Number of FastKafka instances to run, defaults to the number of CPU cores.\n        app (str): Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\n        kafka_broker (str): Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\n\n    Raises:\n        typer.Exit: If there is an unexpected internal error.\n    \"\"\"\n    try:\n        asyncio.run(\n            run_fastkafka_server(\n                num_workers=num_workers, app=app, kafka_broker=kafka_broker\n            )\n        )\n    except Exception as e:\n        typer.secho(f\"Unexpected internal error: {e}\", err=True, 
fg=typer.colors.RED)\n        raise typer.Exit(1)\n\n# %% ../nbs/023_CLI.ipynb 12\n_app.add_typer(_cli_docs._docs_app, name=\"docs\")\n\n# %% ../nbs/023_CLI.ipynb 20\n_app.add_typer(_cli_testing._testing_app, name=\"testing\")\n"
  },
  {
    "path": "fastkafka/_cli_docs.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/024_CLI_Docs.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'docs_install_deps', 'generate_docs', 'serve_docs']\n\n# %% ../nbs/024_CLI_Docs.ipynb 1\nimport asyncio\nimport platform\nimport signal\nimport socketserver\nfrom http.server import SimpleHTTPRequestHandler\nfrom pathlib import Path\nfrom types import FrameType\nfrom typing import *\n\nimport typer\n\nfrom fastkafka._components.docs_dependencies import (\n    _check_npm_with_local,\n    _install_docs_npm_deps,\n    _install_node,\n)\nfrom ._components.helpers import _import_from_string, change_dir\nfrom ._components.logger import get_logger\n\n# %% ../nbs/024_CLI_Docs.ipynb 5\nlogger = get_logger(__name__)\n\n# %% ../nbs/024_CLI_Docs.ipynb 8\n_docs_app = typer.Typer(help=\"Commands for managing FastKafka app documentation\")\n\n# %% ../nbs/024_CLI_Docs.ipynb 9\n@_docs_app.command(\n    \"install_deps\",\n    help=\"Installs dependencies for FastKafka documentation generation\",\n)\ndef docs_install_deps() -> None:\n    \"\"\"\n    Installs dependencies for FastKafka documentation generation.\n\n    Raises:\n        typer.Abort: If the user chooses not to install NodeJS and npm locally.\n    \"\"\"\n    try:\n        _check_npm_with_local()\n    except Exception as e:\n        typer.secho(f\"Unexpected internal error: {e}\", err=True, fg=typer.colors.RED)\n        install_confirm = typer.confirm(\n            \"npm not found or version is too low, do you want us to install the NodeJS and npm locally?\"\n        )\n        if install_confirm is False:\n            print(\"Not installing NodeJS and npm locally, exiting..\")\n            raise typer.Abort()\n        else:\n            _install_node()\n    asyncio.run(_install_docs_npm_deps())\n\n\n@_docs_app.command(\n    \"generate\",\n    help=\"Generates documentation for a FastKafka application\",\n)\ndef generate_docs(\n    root_path: Optional[str] = typer.Option(\n        default=None,\n     
   help=\"root path under which documentation will be created; default is current directory\",\n        show_default=False,\n    ),\n    app: str = typer.Argument(\n        ...,\n        help=\"input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\",\n    ),\n) -> None:\n    \"\"\"\n    Generates documentation for a FastKafka application.\n\n    Args:\n        root_path: The root path under which the documentation will be created.\n            Default is the current directory.\n        app: Input in the form of 'path:app', where **path** is the path to a python\n            file and **app** is an object of type **FastKafka**.\n\n    Raises:\n        typer.Exit: If there is an unexpected internal error.\n    \"\"\"\n    try:\n        application = _import_from_string(app)\n        if root_path is not None:\n            application._root_path = Path(root_path)\n            application._asyncapi_path = application._root_path / \"asyncapi\"\n\n        application.skip_docs = False\n        application.create_docs()\n    except Exception as e:\n        typer.secho(f\"Unexpected internal error: {e}\", err=True, fg=typer.colors.RED)\n        raise typer.Exit(1)\n\n\n@_docs_app.command(\n    \"serve\",\n    help=\"Generates and serves documentation for a FastKafka application\",\n)\ndef serve_docs(\n    root_path: str = typer.Option(\n        default=None,\n        help=\"root path under which documentation will be created; default is current directory\",\n        show_default=False,\n    ),\n    bind: str = typer.Option(\"127.0.0.1\", help=\"Some info\"),\n    port: int = typer.Option(8000, help=\"Some info\"),\n    app: str = typer.Argument(\n        ...,\n        help=\"input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\",\n    ),\n) -> None:\n    \"\"\"\n    Generates and serves documentation for a FastKafka application.\n\n   
 Args:\n        root_path: The root path under which the documentation will be created.\n            Default is the current directory.\n        bind: The IP address to bind the server to. Default is '127.0.0.1'.\n        port: The port number to bind the server to. Default is 8000.\n        app: Input in the form of 'path:app', where **path** is the path to a python\n            file and **app** is an object of type **FastKafka**.\n\n    Raises:\n        typer.Exit: If there is an unexpected internal error.\n    \"\"\"\n    try:\n        application = _import_from_string(app)\n        if root_path is not None:\n            application._root_path = Path(root_path)\n            application._asyncapi_path = application._root_path / \"asyncapi\"\n\n        application.create_docs()\n        with change_dir(str(application._asyncapi_path / \"docs\")):\n            server_address = (bind, port)\n            handler = SimpleHTTPRequestHandler\n\n            d = {\"should_stop\": False}\n\n            def sigint_handler(\n                signal: int, frame: Optional[FrameType], d: Dict[str, bool] = d\n            ) -> None:\n                d[\"should_stop\"] = True\n\n            signal.signal(signal.SIGINT, sigint_handler)\n            signal.signal(signal.SIGTERM, sigint_handler)\n            if platform.system() == \"Windows\":\n                signal.signal(signal.SIGBREAK, sigint_handler)  # type: ignore\n\n            with socketserver.TCPServer(server_address, handler) as httpd:\n                httpd.timeout = 0.1\n                typer.secho(\n                    f\"Serving documentation on http://{server_address[0]}:{server_address[1]}\"\n                )\n                while not d[\"should_stop\"]:\n                    httpd.handle_request()\n                typer.secho(f\"Interupting serving of documentation and cleaning up...\")\n    except Exception as e:\n        typer.secho(f\"Unexpected internal error: {e}\", err=True, fg=typer.colors.RED)\n        
raise typer.Exit(1)\n"
  },
  {
    "path": "fastkafka/_cli_testing.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/025_CLI_Testing.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'testing_install_deps']\n\n# %% ../nbs/025_CLI_Testing.ipynb 1\nfrom typing import *\n\nimport typer\n\nfrom ._components.logger import get_logger\nfrom ._components.test_dependencies import _install_testing_deps\n\n# %% ../nbs/025_CLI_Testing.ipynb 5\nlogger = get_logger(__name__)\n\n# %% ../nbs/025_CLI_Testing.ipynb 8\n_testing_app = typer.Typer(help=\"Commands for managing FastKafka testing\")\n\n# %% ../nbs/025_CLI_Testing.ipynb 9\n@_testing_app.command(\n    \"install_deps\",\n    help=\"Installs dependencies for FastKafka app testing\",\n)\ndef testing_install_deps() -> None:\n    \"\"\"\n    Installs dependencies for FastKafka app testing.\n\n    Raises:\n        typer.Exit: If there is an unexpected internal error.\n    \"\"\"\n    try:\n        _install_testing_deps()\n    except Exception as e:\n        typer.secho(f\"Unexpected internal error: {e}\", err=True, fg=typer.colors.RED)\n        raise typer.Exit(1)\n"
  },
  {
    "path": "fastkafka/_components/__init__.py",
    "content": ""
  },
  {
    "path": "fastkafka/_components/_subprocess.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/022_Subprocess.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'terminate_asyncio_process', 'run_async_subprocesses']\n\n# %% ../../nbs/022_Subprocess.ipynb 1\nimport asyncio\nimport platform\nimport signal\nfrom typing import *\nfrom types import FrameType\n\nimport asyncer\nimport typer\n\nfrom .logger import get_logger\n\n# %% ../../nbs/022_Subprocess.ipynb 5\nlogger = get_logger(__name__)\n\n# %% ../../nbs/022_Subprocess.ipynb 7\nasync def terminate_asyncio_process(p: asyncio.subprocess.Process) -> None:\n    \"\"\"\n    Terminates an asyncio process.\n\n    Args:\n        p: The asyncio.subprocess.Process instance.\n\n    Returns:\n        None.\n    \"\"\"\n    logger.info(f\"terminate_asyncio_process(): Terminating the process {p.pid}...\")\n    # Check if SIGINT already propagated to process\n    try:\n        await asyncio.wait_for(p.wait(), 1)\n        logger.info(\n            f\"terminate_asyncio_process(): Process {p.pid} was already terminated.\"\n        )\n        return\n    except asyncio.TimeoutError:\n        pass\n\n    for i in range(3):\n        if platform.system() == \"Windows\":\n            import psutil\n\n            try:\n                parent = psutil.Process(p.pid)\n                children = parent.children(recursive=True)\n                for child in children:\n                    child.kill()\n                p.send_signal(signal.CTRL_BREAK_EVENT)  # type: ignore\n            except psutil.NoSuchProcess:\n                pass\n        else:\n            p.terminate()\n        try:\n            await asyncio.wait_for(p.wait(), 10)\n            logger.info(f\"terminate_asyncio_process(): Process {p.pid} terminated.\")\n            return\n        except asyncio.TimeoutError:\n            logger.warning(\n                f\"terminate_asyncio_process(): Process {p.pid} not terminated, retrying...\"\n            )\n\n    logger.warning(f\"Killing the process 
{p.pid}...\")\n    p.kill()\n    await p.wait()\n    logger.warning(f\"terminate_asyncio_process(): Process {p.pid} killed!\")\n\n# %% ../../nbs/022_Subprocess.ipynb 9\nasync def run_async_subprocesses(\n    commands: List[str], commands_args: List[List[Any]], *, sleep_between: int = 0\n) -> None:\n    \"\"\"\n    Runs multiple async subprocesses.\n\n    Args:\n        commands: A list of commands to execute.\n        commands_args: A list of argument lists for each command.\n        sleep_between: The sleep duration in seconds between starting each subprocess.\n\n    Returns:\n        None.\n    \"\"\"\n    loop = asyncio.get_event_loop()\n\n    HANDLED_SIGNALS = (\n        signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.\n        signal.SIGTERM,  # Unix signal 15. Sent by `kill <pid>`.\n    )\n    if platform.system() == \"Windows\":\n        HANDLED_SIGNALS = (*HANDLED_SIGNALS, signal.SIGBREAK)  # type: ignore\n\n    d = {\"should_exit\": False}\n\n    def handle_windows_exit(\n        signum: int, frame: Optional[FrameType], d: Dict[str, bool] = d\n    ) -> None:\n        d[\"should_exit\"] = True\n\n    def handle_exit(sig: int, d: Dict[str, bool] = d) -> None:\n        d[\"should_exit\"] = True\n\n    for sig in HANDLED_SIGNALS:\n        if platform.system() == \"Windows\":\n            signal.signal(sig, handle_windows_exit)\n        else:\n            loop.add_signal_handler(sig, handle_exit, sig)\n\n    async with asyncer.create_task_group() as tg:\n        tasks = []\n        for cmd, args in zip(commands, commands_args):\n            tasks.append(\n                tg.soonify(asyncio.create_subprocess_exec)(\n                    cmd,\n                    *args,\n                    stdout=asyncio.subprocess.PIPE,\n                    stdin=asyncio.subprocess.PIPE,\n                )\n            )\n            await asyncio.sleep(sleep_between)\n\n    procs = [task.value for task in tasks]\n\n    async def log_output(\n        output: 
Optional[asyncio.StreamReader], pid: int, d: Dict[str, bool] = d\n    ) -> None:\n        if output is None:\n            raise RuntimeError(\"Expected StreamReader, got None. Is stdout piped?\")\n        while not output.at_eof():\n            outs = await output.readline()\n            if outs != b\"\":\n                typer.echo(f\"[{pid:03d}]: \" + outs.decode(\"utf-8\"), nl=False)\n\n    async with asyncer.create_task_group() as tg:\n        for proc in procs:\n            tg.soonify(log_output)(proc.stdout, proc.pid)\n\n        while not d[\"should_exit\"]:\n            await asyncio.sleep(0.2)\n\n        typer.echo(\"Starting process cleanup, this may take a few seconds...\")\n        for proc in procs:\n            tg.soonify(terminate_asyncio_process)(proc)\n\n    for proc in procs:\n        output, _ = await proc.communicate()\n        if output:\n            typer.echo(f\"[{proc.pid:03d}]: \" + output.decode(\"utf-8\"), nl=False)\n\n    returncodes = [proc.returncode for proc in procs]\n    if not returncodes == [0] * len(procs):\n        typer.secho(\n            f\"Return codes are not all zero: {returncodes}\",\n            err=True,\n            fg=typer.colors.RED,\n        )\n        raise typer.Exit(1)\n"
  },
  {
    "path": "fastkafka/_components/aiokafka_consumer_loop.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/011_ConsumerLoop.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'AsyncConsume', 'AsyncConsumeMeta', 'SyncConsume', 'SyncConsumeMeta', 'ConsumeCallable', 'EventMetadata',\n           'sanitize_kafka_config', 'aiokafka_consumer_loop']\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 1\nfrom asyncio import iscoroutinefunction, Task  # do not use the version from inspect\nfrom typing import *\nfrom dataclasses import dataclass\n\nimport asyncer\nfrom aiokafka.structs import ConsumerRecord\nfrom pydantic import BaseModel\n\nimport fastkafka._aiokafka_imports\nfrom .logger import get_logger\nfrom .meta import delegates, export\nfrom .task_streaming import get_executor, StreamExecutor\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 5\nlogger = get_logger(__name__)\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 8\n@dataclass\n@export(\"fastkafka\")\nclass EventMetadata:\n    \"\"\"A class for encapsulating Kafka record metadata.\n\n    Args:\n        topic: The topic this record is received from\n        partition: The partition from which this record is received\n        offset: The position of this record in the corresponding Kafka partition\n        timestamp: The timestamp of this record\n        timestamp_type: The timestamp type of this record\n        key: The key (or `None` if no key is specified)\n        value: The value\n        serialized_key_size: The size of the serialized, uncompressed key in bytes\n        serialized_value_size: The size of the serialized, uncompressed value in bytes\n        headers: The headers\n    \"\"\"\n\n    topic: str\n    partition: int\n    offset: int\n    timestamp: int\n    timestamp_type: int\n    key: Optional[bytes]\n    value: Optional[bytes]\n    checksum: int\n    serialized_key_size: int\n    serialized_value_size: int\n    headers: Sequence[Tuple[str, bytes]]\n\n    @staticmethod\n    def create_event_metadata(record: ConsumerRecord) -> \"EventMetadata\":  # type: ignore\n   
     \"\"\"Creates an instance of EventMetadata from a ConsumerRecord.\n\n        Args:\n            record: The Kafka ConsumerRecord.\n\n        Returns:\n            The created EventMetadata instance.\n        \"\"\"\n        return EventMetadata(\n            topic=record.topic,\n            partition=record.partition,\n            offset=record.offset,\n            timestamp=record.timestamp,\n            timestamp_type=record.timestamp_type,\n            value=record.value,\n            checksum=record.checksum,\n            key=record.key,\n            serialized_key_size=record.serialized_key_size,\n            serialized_value_size=record.serialized_value_size,\n            headers=record.headers,\n        )\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 11\nAsyncConsume = Callable[[Union[List[BaseModel], BaseModel]], Awaitable[None]]\nAsyncConsumeMeta = Callable[\n    [Union[List[BaseModel], BaseModel], Union[List[EventMetadata], EventMetadata]],\n    Awaitable[None],\n]\nSyncConsume = Callable[[Union[List[BaseModel], BaseModel]], None]\nSyncConsumeMeta = Callable[\n    [Union[List[BaseModel], BaseModel], Union[List[EventMetadata], EventMetadata]], None\n]\n\nConsumeCallable = Union[AsyncConsume, AsyncConsumeMeta, SyncConsume, SyncConsumeMeta]\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 12\ndef _callback_parameters_wrapper(\n    callback: Union[AsyncConsume, AsyncConsumeMeta]\n) -> AsyncConsumeMeta:\n    \"\"\"Wraps an async callback and filters the arguments to pass based on if the function accepts EventMetadata as argument\n\n    Args:\n        callback: async callable that will be wrapped\n\n    Returns:\n        Wrapped callback with filtered params\n    \"\"\"\n\n    async def _params_wrap(\n        msg: Union[BaseModel, List[BaseModel]],\n        meta: Union[EventMetadata, List[EventMetadata]],\n        callback: Union[AsyncConsume, AsyncConsumeMeta] = callback,\n    ) -> None:\n        types = list(get_type_hints(callback).values())\n        args: List[\n   
         Union[BaseModel, List[BaseModel], EventMetadata, List[EventMetadata]]\n        ] = [msg]\n        if EventMetadata in types:\n            args.insert(types.index(EventMetadata), meta)\n        if List[EventMetadata] in types:\n            args.insert(types.index(List[EventMetadata]), meta)\n        await callback(*args)  # type: ignore\n\n    return _params_wrap\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 17\ndef _prepare_callback(callback: ConsumeCallable) -> AsyncConsumeMeta:\n    \"\"\"\n    Prepares a callback to be used in the consumer loop.\n        1. If callback is sync, asyncify it\n        2. Wrap the callback into a safe callback for exception handling\n\n    Args:\n        callback: async callable that will be prepared for use in consumer\n\n    Returns:\n        Prepared callback\n    \"\"\"\n    async_callback: Union[AsyncConsume, AsyncConsumeMeta] = (\n        callback if iscoroutinefunction(callback) else asyncer.asyncify(callback)  # type: ignore\n    )\n    return _callback_parameters_wrapper(async_callback)\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 24\ndef _get_single_msg_handlers(  # type: ignore\n    *,\n    consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer,\n    callback: AsyncConsumeMeta,\n    decoder_fn: Callable[[bytes, Type[BaseModel]], Any],\n    msg_type: Type[BaseModel],\n    **kwargs: Any,\n) -> Tuple[\n    Callable[\n        [\n            ConsumerRecord,\n            AsyncConsumeMeta,\n            Callable[[bytes, Type[BaseModel]], Any],\n            Type[BaseModel],\n        ],\n        Awaitable[None],\n    ],\n    Callable[\n        [fastkafka._aiokafka_imports.AIOKafkaConsumer, Any],\n        Awaitable[List[ConsumerRecord]],\n    ],\n]:\n    \"\"\"\n    Retrieves the message handlers for consuming single messages from a Kafka topic.\n\n    Args:\n        consumer: The Kafka consumer instance.\n        callback: The callback function to handle the consumed message.\n        decoder_fn: The function to decode the consumed 
message.\n        msg_type: The type of the consumed message.\n        **kwargs: Additional keyword arguments for the consumer.\n\n    Returns:\n        The handle_msg function and poll_consumer function.\n    \"\"\"\n\n    async def handle_msg(  # type: ignore\n        record: ConsumerRecord,\n        callback: AsyncConsumeMeta = callback,\n        decoder_fn: Callable[[bytes, Type[BaseModel]], Any] = decoder_fn,\n        msg_type: Type[BaseModel] = msg_type,\n    ) -> None:\n        await callback(\n            decoder_fn(record.value, msg_type),\n            EventMetadata.create_event_metadata(record),\n        )\n\n    async def poll_consumer(  # type: ignore\n        consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer = consumer,\n        kwargs: Any = kwargs,\n    ) -> List[ConsumerRecord]:\n        msgs = await consumer.getmany(**kwargs)\n        return [msg for msg_group in msgs.values() for msg in msg_group]\n\n    return handle_msg, poll_consumer\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 26\ndef _get_batch_msg_handlers(  # type: ignore\n    *,\n    consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer,\n    callback: AsyncConsumeMeta,\n    decoder_fn: Callable[[bytes, Type[BaseModel]], Any],\n    msg_type: Type[BaseModel],\n    **kwargs: Any,\n) -> Tuple[\n    Callable[\n        [\n            List[ConsumerRecord],\n            AsyncConsumeMeta,\n            Callable[[bytes, Type[BaseModel]], Any],\n            Type[BaseModel],\n        ],\n        Awaitable[None],\n    ],\n    Callable[\n        [fastkafka._aiokafka_imports.AIOKafkaConsumer, Any],\n        Awaitable[List[List[ConsumerRecord]]],\n    ],\n]:\n    \"\"\"\n    Retrieves the message handlers for consuming messages in batches from a Kafka topic.\n\n    Args:\n        consumer: The Kafka consumer instance.\n        callback: The callback function to handle the consumed messages.\n        decoder_fn: The function to decode the consumed messages.\n        msg_type: The type of the consumed 
messages.\n        **kwargs: Additional keyword arguments for the consumer.\n\n    Returns:\n        The handle_msg function and poll_consumer function.\n    \"\"\"\n\n    async def handle_msg(  # type: ignore\n        records: List[ConsumerRecord],\n        callback: AsyncConsumeMeta = callback,\n        decoder_fn: Callable[[bytes, Type[BaseModel]], Any] = decoder_fn,\n        msg_type: Type[BaseModel] = msg_type,\n    ) -> None:\n        await callback(\n            [decoder_fn(record.value, msg_type) for record in records],\n            [EventMetadata.create_event_metadata(record) for record in records],\n        )\n\n    async def poll_consumer(  # type: ignore\n        consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer = consumer,\n        kwargs: Any = kwargs,\n    ) -> List[List[ConsumerRecord]]:\n        msgs = await consumer.getmany(**kwargs)\n        return [value for value in msgs.values() if len(value) > 0]\n\n    return handle_msg, poll_consumer\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 28\n@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer.getmany)\nasync def _aiokafka_consumer_loop(  # type: ignore\n    consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer,\n    *,\n    topic: str,\n    decoder_fn: Callable[[bytes, Type[BaseModel]], Any],\n    callback: ConsumeCallable,\n    max_buffer_size: int = 100_000,\n    msg_type: Union[Type[List[BaseModel]], Type[BaseModel]],\n    is_shutting_down_f: Callable[[], bool],\n    executor: Union[str, StreamExecutor, None] = None,\n    **kwargs: Any,\n) -> None:\n    \"\"\"\n    Consumer loop for infinite pooling of the AIOKafka consumer for new messages. 
Calls consumer.getmany()\n    and after the consumer returns messages or times out, messages are decoded and streamed to defined callback.\n\n    Args:\n        topic: Topic to subscribe\n        decoder_fn: Function to decode the messages consumed from the topic\n        callback: Callback function called with the decoded messages\n        timeout_ms: Time to time out the getmany request by the consumer\n        max_buffer_size: Maximum number of unconsumed messages in the callback buffer\n        msg_type: Type used for parsing the decoded messages\n        is_shutting_down_f: Function for controlling the shutdown of consumer loop\n    \"\"\"\n\n    prepared_callback = _prepare_callback(callback)\n\n    if hasattr(msg_type, \"__origin__\") and msg_type.__origin__ == list:\n        handle_msg, poll_consumer = _get_batch_msg_handlers(\n            consumer=consumer,\n            callback=prepared_callback,\n            decoder_fn=decoder_fn,\n            msg_type=msg_type.__args__[0],  # type: ignore\n            **kwargs,\n        )\n    else:\n        handle_msg, poll_consumer = _get_single_msg_handlers(\n            consumer=consumer,\n            callback=prepared_callback,\n            decoder_fn=decoder_fn,\n            msg_type=msg_type,  # type: ignore\n            **kwargs,\n        )\n\n    await get_executor(executor).run(\n        is_shutting_down_f=is_shutting_down_f,\n        generator=poll_consumer,  # type: ignore\n        processor=handle_msg,  # type: ignore\n    )\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 35\ndef sanitize_kafka_config(**kwargs: Any) -> Dict[str, Any]:\n    \"\"\"Sanitize Kafka config\"\"\"\n    return {k: \"*\" * len(v) if \"pass\" in k.lower() else v for k, v in kwargs.items()}\n\n# %% ../../nbs/011_ConsumerLoop.ipynb 37\n@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer)\n@delegates(_aiokafka_consumer_loop, keep=True)\nasync def aiokafka_consumer_loop(\n    topic: str,\n    decoder_fn: Callable[[bytes, 
Type[BaseModel]], Any],\n    *,\n    timeout_ms: int = 100,\n    max_buffer_size: int = 100_000,\n    callback: ConsumeCallable,\n    msg_type: Union[Type[List[BaseModel]], Type[BaseModel]],\n    is_shutting_down_f: Callable[[], bool],\n    executor: Union[str, StreamExecutor, None] = None,\n    **kwargs: Any,\n) -> None:\n    \"\"\"Consumer loop for infinite polling of the AIOKafka consumer for new messages. Creates and starts AIOKafkaConsumer\n    and runs _aiokafka_consumer_loop for infinite polling of the consumer for new messages.\n\n    Args:\n        topic: name of the topic to subscribe to\n        decoder_fn: Function to decode the messages consumed from the topic\n        callback: callback function to be called after decoding and parsing a consumed message\n        timeout_ms: Time to time out the getmany request by the consumer\n        max_buffer_size: Maximum number of unconsumed messages in the callback buffer\n        msg_type: Type with `parse_json` method used for parsing a decoded message\n        is_shutting_down_f: Function for controlling the shutdown of consumer loop\n    \"\"\"\n    logger.info(f\"aiokafka_consumer_loop() starting...\")\n    try:\n        consumer = fastkafka._aiokafka_imports.AIOKafkaConsumer(\n            **kwargs,\n        )\n        logger.info(\n            f\"aiokafka_consumer_loop(): Consumer created using the following parameters: {sanitize_kafka_config(**kwargs)}\"\n        )\n\n        await consumer.start()\n        logger.info(\"aiokafka_consumer_loop(): Consumer started.\")\n        consumer.subscribe([topic])\n        logger.info(\"aiokafka_consumer_loop(): Consumer subscribed.\")\n\n        try:\n            await _aiokafka_consumer_loop(\n                consumer=consumer,\n                topic=topic,\n                decoder_fn=decoder_fn,\n                max_buffer_size=max_buffer_size,\n                timeout_ms=timeout_ms,\n                callback=callback,\n                msg_type=msg_type,\n           
     is_shutting_down_f=is_shutting_down_f,\n                executor=executor,\n            )\n        finally:\n            await consumer.stop()\n            logger.info(f\"aiokafka_consumer_loop(): Consumer stopped.\")\n            logger.info(f\"aiokafka_consumer_loop() finished.\")\n    except Exception as e:\n        logger.error(\n            f\"aiokafka_consumer_loop(): unexpected exception raised: '{e.__repr__()}'\"\n        )\n        raise e\n"
  },
  {
    "path": "fastkafka/_components/asyncapi.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/014_AsyncAPI.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'sec_scheme_name_mapping', 'KafkaMessage', 'SecurityType', 'APIKeyLocation', 'SecuritySchema', 'KafkaBroker',\n           'ContactInfo', 'KafkaServiceInfo', 'KafkaBrokers', 'yaml_file_cmp', 'export_async_spec']\n\n# %% ../../nbs/014_AsyncAPI.ipynb 1\nimport json\nimport platform\nimport shutil\nimport subprocess  # nosec: B404: Consider possible security implications associated with the subprocess module.\nimport tempfile\nfrom datetime import timedelta\nfrom enum import Enum\nfrom pathlib import Path\nfrom typing import *\n\nfrom pydantic import ConfigDict, BaseModel, Field, HttpUrl, model_serializer\nfrom pydantic.type_adapter import TypeAdapter\n\nfrom .aiokafka_consumer_loop import ConsumeCallable\nfrom .docs_dependencies import _check_npm_with_local\nfrom .helpers import unwrap_list_type\nfrom .logger import get_logger\nfrom fastkafka._components.producer_decorator import (\n    ProduceCallable,\n    unwrap_from_kafka_event,\n)\n\n# %% ../../nbs/014_AsyncAPI.ipynb 3\nlogger = get_logger(__name__)\n\n# %% ../../nbs/014_AsyncAPI.ipynb 5\nclass KafkaMessage(BaseModel):\n    # This following config is used to properly format timedelta in ISO 8601 format\n    model_config = ConfigDict(ser_json_timedelta=\"iso8601\")\n\n# %% ../../nbs/014_AsyncAPI.ipynb 7\nclass SecurityType(str, Enum):\n    plain = \"plain\"\n    userPassword = \"userPassword\"\n    apiKey = \"apiKey\"\n    X509 = \"X509\"\n    symmetricEncryption = \"symmetricEncryption\"\n    asymmetricEncryption = \"asymmetricEncryption\"\n    httpApiKey = \"httpApiKey\"\n    http = \"http\"\n    oauth2 = \"oauth2\"\n    openIdConnect = \"openIdConnect\"\n    scramSha256 = \"scramSha256\"\n    scramSha512 = \"scramSha512\"\n    gssapi = \"gssapi\"\n\n\nclass APIKeyLocation(str, Enum):\n    user = \"user\"\n    password = \"password\"  # nosec\n    query = \"query\"\n    header = 
\"header\"\n    cookie = \"cookie\"\n\n\nsec_scheme_name_mapping = {\"security_type\": \"type\", \"api_key_loc\": \"in\"}\n\n\nclass SecuritySchema(BaseModel):\n    security_type: SecurityType = Field(..., example=\"plain\")\n    description: Optional[str] = Field(None, example=\"My security scheme\")\n    name: Optional[str] = Field(None, example=\"my_secret_scheme\")\n    api_key_loc: Optional[APIKeyLocation] = Field(None, example=\"user\")\n    scheme: Optional[str] = None\n    bearerFormat: Optional[str] = None\n    flows: Optional[str] = None\n    openIdConnectUrl: Optional[str] = None\n\n    def __init__(self, **kwargs: Any):\n        for k, v in sec_scheme_name_mapping.items():\n            if v in kwargs:\n                kwargs[k] = kwargs.pop(v)\n        super().__init__(**kwargs)\n\n    def model_dump(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:\n        \"\"\"Renames internal names of members ('security_type' -> 'type', 'api_key_loc' -> 'in')\"\"\"\n        d = super().model_dump(*args, **kwargs)\n\n        for k, v in sec_scheme_name_mapping.items():\n            d[v] = d.pop(k)\n\n        # removes None values\n        d = {k: v for k, v in d.items() if v is not None}\n\n        return d\n\n    def model_dump_json(self, *args: Any, **kwargs: Any) -> str:\n        \"\"\"Serialize into JSON using model_dump()\"\"\"\n        return json.dumps(self.model_dump(), *args, **kwargs)\n\n# %% ../../nbs/014_AsyncAPI.ipynb 9\nclass KafkaBroker(BaseModel):\n    \"\"\"Kafka broker\"\"\"\n\n    url: str = Field(..., example=\"localhost\")\n    description: str = Field(\"Kafka broker\")\n    port: Union[str, int] = Field(\"9092\")\n    protocol: str = Field(\"kafka\")\n    security: Optional[SecuritySchema] = None\n\n    def model_dump(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:\n        \"\"\"Makes port a variable and remove it from the dictionary\"\"\"\n        d = super().model_dump(*args, **kwargs)\n        if self.security:\n            
d[\"security\"] = self.security.model_dump(*args, **kwargs)\n        d[\"variables\"] = {\"port\": {\"default\": str(self.port)}}\n        d.pop(\"port\")\n\n        d = {k: v for k, v in d.items() if v is not None}\n\n        return d\n\n    def model_dump_json(self, *args: Any, **kwargs: Any) -> str:\n        \"\"\"Serialize into JSON using dict()\"\"\"\n        return json.dumps(self.model_dump(), *args, **kwargs)\n\n# %% ../../nbs/014_AsyncAPI.ipynb 12\nclass ContactInfo(BaseModel):\n    name: str = Field(..., example=\"My company\")\n    url: HttpUrl = Field(..., example=\"https://www.github.com/mycompany\")\n    email: str = Field(..., example=\"noreply@mycompany.com\")\n\n\nclass KafkaServiceInfo(BaseModel):\n    title: str = Field(\"Title\")\n    version: str = Field(\"0.0.1\")\n    description: str = Field(\"Description of the service\")\n    contact: ContactInfo = Field(\n        ...,\n    )\n\n# %% ../../nbs/014_AsyncAPI.ipynb 14\nclass KafkaBrokers(BaseModel):\n    brokers: Dict[str, Union[List[KafkaBroker], KafkaBroker]]\n\n    def model_dump(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:\n        \"\"\"Transcribe brokers into bootstrap server groups\"\"\"\n        d = super().model_dump(*args, **kwargs)\n\n        brokers = {}\n        for k, v in self.brokers.items():\n            if isinstance(v, list):\n                brokers.update(\n                    {\n                        f\"{k}-bootstrap-server-{i}\": u_v.model_dump()\n                        for i, u_v in enumerate(v)\n                    }\n                )\n            else:\n                brokers.update({f\"{k}\": v.model_dump()})\n        d[\"brokers\"] = brokers\n        d = {k: v for k, v in d.items() if v is not None}\n\n        return d\n\n    def model_dump_json(self, *args: Any, **kwargs: Any) -> str:\n        \"\"\"Serialize into JSON using dict()\"\"\"\n        return json.dumps(self.model_dump(), *args, **kwargs)\n\n# %% ../../nbs/014_AsyncAPI.ipynb 17\n# T = 
TypeVar(\"T\")\n\n\ndef _get_msg_cls_for_producer(f: ProduceCallable) -> Type[Any]:\n    types = get_type_hints(f)\n    return_type = types.pop(\"return\", type(None))\n    # @app.producer must define a return value\n    if return_type == type(None):\n        raise ValueError(\n            f\"Producer function must have a defined return value, got {return_type} as return value\"\n        )\n\n    return_type = unwrap_from_kafka_event(return_type)\n    return_type = unwrap_list_type(return_type)\n\n    if not hasattr(return_type, \"json\"):\n        raise ValueError(f\"Producer function return value must have json method\")\n    return return_type  # type: ignore\n\n# %% ../../nbs/014_AsyncAPI.ipynb 22\ndef _get_msg_cls_for_consumer(f: ConsumeCallable) -> Type[Any]:\n    types = get_type_hints(f)\n    return_type = types.pop(\"return\", type(None))\n    types_list = list(types.values())\n    # @app.consumer does not return a value\n    if return_type != type(None):\n        raise ValueError(\n            f\"Consumer function cannot return any value, got {return_type}\"\n        )\n    # @app.consumer first consumer argument must be a msg which is a subclass of BaseModel\n    try:\n        msg_type = types_list[0]\n\n        msg_type = unwrap_list_type(msg_type)\n\n        if not issubclass(msg_type, BaseModel):\n            raise ValueError(\n                f\"Consumer function first param must be a BaseModel subclass msg, got {types_list}\"\n            )\n\n        return msg_type  # type: ignore\n\n    except IndexError:\n        raise ValueError(\n            f\"Consumer function first param must be a BaseModel subclass msg, got {types_list}\"\n        )\n\n# %% ../../nbs/014_AsyncAPI.ipynb 27\ndef _get_topic_dict(\n    f: Callable[[Any], Any],\n    direction: str = \"publish\",\n) -> Dict[str, Any]:\n    if not direction in [\"publish\", \"subscribe\"]:\n        raise ValueError(\n            f\"direction must be one of ['publish', 'subscribe'], but it is 
'{direction}'.\"\n        )\n\n    #     msg_cls = None\n\n    if direction == \"publish\":\n        msg_cls = _get_msg_cls_for_producer(f)\n    elif direction == \"subscribe\":\n        msg_cls = _get_msg_cls_for_consumer(f)\n\n    msg_schema = {\"message\": {\"$ref\": f\"#/components/messages/{msg_cls.__name__}\"}}\n    if hasattr(f, \"description\"):\n        msg_schema[\"description\"] = getattr(f, \"description\")\n    elif f.__doc__ is not None:\n        msg_schema[\"description\"] = f.__doc__  # type: ignore\n    return {direction: msg_schema}\n\n# %% ../../nbs/014_AsyncAPI.ipynb 31\ndef _get_channels_schema(\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, ProduceCallable],\n) -> Dict[str, Dict[str, Dict[str, Any]]]:\n    topics = {}\n    for ms, d in zip([consumers, producers], [\"subscribe\", \"publish\"]):\n        for topic, f in ms.items():  # type: ignore\n            topics[topic] = _get_topic_dict(f, d)\n    return topics\n\n# %% ../../nbs/014_AsyncAPI.ipynb 33\ndef _get_kafka_msg_classes(\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, ProduceCallable],\n) -> Set[Type[BaseModel]]:\n    fc = [_get_msg_cls_for_consumer(consumer) for consumer in consumers.values()]\n    fp = [_get_msg_cls_for_producer(producer) for producer in producers.values()]\n    return set(fc + fp)\n\n\ndef _get_kafka_msg_definitions(\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, ProduceCallable],\n) -> Dict[str, Dict[str, Any]]:\n    msg_classes = _get_kafka_msg_classes(consumers, producers)\n    _, msg_definitions = TypeAdapter.json_schemas(\n        [(msg_cls, \"validation\", TypeAdapter(msg_cls)) for msg_cls in msg_classes]\n    )\n    return msg_definitions\n\n# %% ../../nbs/014_AsyncAPI.ipynb 35\ndef _get_example(cls: Type[BaseModel]) -> BaseModel:\n    kwargs: Dict[str, Any] = {}\n    for k, v in cls.model_fields.items():\n        #         try:\n        if hasattr(v, \"json_schema_extra\") and \"example\" 
in v.json_schema_extra:  # type: ignore\n            example = v.json_schema_extra[\"example\"]  # type: ignore\n            kwargs[k] = example\n    #         except:\n    #             pass\n    return json.loads(cls(**kwargs).model_dump_json())  # type: ignore\n\n# %% ../../nbs/014_AsyncAPI.ipynb 37\ndef _add_example_to_msg_definitions(\n    msg_cls: Type[BaseModel], msg_schema: Dict[str, Dict[str, Any]]\n) -> None:\n    try:\n        example = _get_example(msg_cls)\n    except Exception as e:\n        example = None\n    if example is not None:\n        msg_schema[\"$defs\"][msg_cls.__name__][\"example\"] = example\n\n\ndef _get_msg_definitions_with_examples(\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, ProduceCallable],\n) -> Dict[str, Dict[str, Any]]:\n    msg_classes = _get_kafka_msg_classes(consumers, producers)\n    msg_schema: Dict[str, Dict[str, Any]]\n    _, msg_schema = TypeAdapter.json_schemas(\n        [(msg_cls, \"validation\", TypeAdapter(msg_cls)) for msg_cls in msg_classes]\n    )\n    for msg_cls in msg_classes:\n        _add_example_to_msg_definitions(msg_cls, msg_schema)\n    msg_schema = (\n        {k: {\"payload\": v} for k, v in msg_schema[\"$defs\"].items()}\n        if \"$defs\" in msg_schema\n        else {}\n    )\n\n    return msg_schema\n\n# %% ../../nbs/014_AsyncAPI.ipynb 39\ndef _get_security_schemes(kafka_brokers: KafkaBrokers) -> Dict[str, Any]:\n    security_schemes = {}\n    for key, broker in kafka_brokers.brokers.items():\n        if isinstance(broker, list):\n            kafka_broker = broker[0]\n        else:\n            kafka_broker = broker\n\n        if kafka_broker.security is not None:\n            security_schemes[f\"{key}_default_security\"] = json.loads(\n                kafka_broker.security.model_dump_json()\n            )\n    return security_schemes\n\n# %% ../../nbs/014_AsyncAPI.ipynb 41\ndef _get_components_schema(\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, 
ProduceCallable],\n    kafka_brokers: KafkaBrokers,\n) -> Dict[str, Any]:\n    definitions = _get_msg_definitions_with_examples(consumers, producers)\n    msg_classes = [cls.__name__ for cls in _get_kafka_msg_classes(consumers, producers)]\n    components = {\n        \"messages\": {k: v for k, v in definitions.items() if k in msg_classes},\n        \"schemas\": {k: v for k, v in definitions.items() if k not in msg_classes},\n        \"securitySchemes\": _get_security_schemes(kafka_brokers),\n    }\n    substitutions = {\n        f\"#/$defs/{k}\": f\"#/components/messages/{k}\"\n        if k in msg_classes\n        else f\"#/components/schemas/{k}\"\n        for k in definitions.keys()\n    }\n\n    def _sub_values(d: Any, substitutions: Dict[str, str] = substitutions) -> Any:\n        if isinstance(d, dict):\n            d = {k: _sub_values(v) for k, v in d.items()}\n        if isinstance(d, list):\n            d = [_sub_values(k) for k in d]\n        elif isinstance(d, str):\n            for k, v in substitutions.items():\n                if d == k:\n                    d = v\n        return d\n\n    return _sub_values(components)  # type: ignore\n\n# %% ../../nbs/014_AsyncAPI.ipynb 43\ndef _get_servers_schema(kafka_brokers: KafkaBrokers) -> Dict[str, Any]:\n    servers = json.loads(kafka_brokers.model_dump_json(sort_keys=False))[\"brokers\"]\n\n    for key, kafka_broker in servers.items():\n        if \"security\" in kafka_broker:\n            servers[key][\"security\"] = [{f\"{key}_default_security\": []}]\n    return servers  # type: ignore\n\n# %% ../../nbs/014_AsyncAPI.ipynb 45\ndef _get_asyncapi_schema(\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, ProduceCallable],\n    kafka_brokers: KafkaBrokers,\n    kafka_service_info: KafkaServiceInfo,\n) -> Dict[str, Any]:\n    #     # we don't use dict because we need custom JSON encoders\n    info = json.loads(kafka_service_info.model_dump_json())\n    servers = 
_get_servers_schema(kafka_brokers)\n    #     # should be in the proper format already\n    channels = _get_channels_schema(consumers, producers)\n    components = _get_components_schema(consumers, producers, kafka_brokers)\n    return {\n        \"asyncapi\": \"2.5.0\",\n        \"info\": info,\n        \"servers\": servers,\n        \"channels\": channels,\n        \"components\": components,\n    }\n\n# %% ../../nbs/014_AsyncAPI.ipynb 47\ndef yaml_file_cmp(file_1: Union[Path, str], file_2: Union[Path, str]) -> bool:\n    \"\"\"Compares two YAML files and returns True if their contents are equal, False otherwise.\n\n    Args:\n        file_1: Path or string representing the first YAML file.\n        file_2: Path or string representing the second YAML file.\n\n    Returns:\n        A boolean indicating whether the contents of the two YAML files are equal.\n    \"\"\"\n    try:\n        import yaml\n    except Exception as e:\n        msg = \"Please install docs version of fastkafka using 'pip install fastkafka[docs]' command\"\n        logger.error(msg)\n        raise RuntimeError(msg)\n\n    def _read(f: Union[Path, str]) -> Dict[str, Any]:\n        with open(f) as stream:\n            return yaml.safe_load(stream)  # type: ignore\n\n    d = [_read(f) for f in [file_1, file_2]]\n    return d[0] == d[1]\n\n# %% ../../nbs/014_AsyncAPI.ipynb 48\ndef _generate_async_spec(\n    *,\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, ProduceCallable],\n    kafka_brokers: KafkaBrokers,\n    kafka_service_info: KafkaServiceInfo,\n    spec_path: Path,\n    force_rebuild: bool,\n) -> bool:\n    try:\n        import yaml\n    except Exception as e:\n        msg = \"Please install docs version of fastkafka using 'pip install fastkafka[docs]' command\"\n        logger.error(msg)\n        raise RuntimeError(msg)\n\n    # generate spec file\n    asyncapi_schema = _get_asyncapi_schema(\n        consumers, producers, kafka_brokers, kafka_service_info\n    )\n    
if not spec_path.exists():\n        logger.info(\n            f\"Old async specifications at '{spec_path.resolve()}' does not exist.\"\n        )\n    spec_path.parent.mkdir(exist_ok=True, parents=True)\n    with tempfile.TemporaryDirectory() as d:\n        with open(Path(d) / \"asyncapi.yml\", \"w\") as f:\n            yaml.dump(asyncapi_schema, f, sort_keys=False)\n        spec_changed = not (\n            spec_path.exists() and yaml_file_cmp(Path(d) / \"asyncapi.yml\", spec_path)\n        )\n        if spec_changed or force_rebuild:\n            shutil.copyfile(Path(d) / \"asyncapi.yml\", spec_path)\n            logger.info(\n                f\"New async specifications generated at: '{spec_path.resolve()}'\"\n            )\n            return True\n        else:\n            logger.info(\n                f\"Keeping the old async specifications at: '{spec_path.resolve()}'\"\n            )\n            return False\n\n# %% ../../nbs/014_AsyncAPI.ipynb 50\ndef _generate_async_docs(\n    *,\n    spec_path: Path,\n    docs_path: Path,\n) -> None:\n    _check_npm_with_local()\n    cmd = [\n        \"npx\",\n        \"-y\",\n        \"-p\",\n        \"@asyncapi/generator\",\n        \"ag\",\n        f\"{spec_path}\",\n        \"@asyncapi/html-template\",\n        \"-o\",\n        f\"{docs_path}\",\n        \"--force-write\",\n    ]\n    # nosemgrep: python.lang.security.audit.subprocess-shell-true.subprocess-shell-true\n    p = subprocess.run(  # nosec: B602, B603 subprocess call - check for execution of untrusted input.\n        cmd,\n        stderr=subprocess.STDOUT,\n        stdout=subprocess.PIPE,\n        shell=True if platform.system() == \"Windows\" else False,\n    )\n    if p.returncode == 0:\n        logger.info(f\"Async docs generated at '{docs_path}'\")\n        logger.info(f\"Output of '$ {' '.join(cmd)}'{p.stdout.decode()}\")\n    else:\n        logger.error(f\"Generation of async docs failed!\")\n        logger.info(f\"Output of '$ {' 
'.join(cmd)}'{p.stdout.decode()}\")\n        raise ValueError(\n            f\"Generation of async docs failed, used '$ {' '.join(cmd)}'{p.stdout.decode()}\"\n        )\n\n# %% ../../nbs/014_AsyncAPI.ipynb 52\ndef export_async_spec(\n    *,\n    consumers: Dict[str, ConsumeCallable],\n    producers: Dict[str, ProduceCallable],\n    kafka_brokers: KafkaBrokers,\n    kafka_service_info: KafkaServiceInfo,\n    asyncapi_path: Union[Path, str],\n    force_rebuild: bool = True,\n) -> None:\n    \"\"\"Exports the AsyncAPI specification and documentation to the given path.\n\n    Args:\n        consumers: Dictionary of consumer functions, where the keys are the channel names and the values are the consumer functions.\n        producers: Dictionary of producer functions, where the keys are the channel names and the values are the producer functions.\n        kafka_brokers: KafkaBrokers object representing the Kafka brokers configuration.\n        kafka_service_info: KafkaServiceInfo object representing the Kafka service info configuration.\n        asyncapi_path: Path or string representing the base path where the specification and documentation will be exported.\n        force_rebuild: Boolean indicating whether to force a rebuild of the specification file even if it already exists.\n    \"\"\"\n    # generate spec file\n    spec_path = Path(asyncapi_path) / \"spec\" / \"asyncapi.yml\"\n    is_spec_built = _generate_async_spec(\n        consumers=consumers,\n        producers=producers,\n        kafka_brokers=kafka_brokers,\n        kafka_service_info=kafka_service_info,\n        spec_path=spec_path,\n        force_rebuild=force_rebuild,\n    )\n\n    # generate docs folder\n    docs_path = Path(asyncapi_path) / \"docs\"\n\n    if not is_spec_built and docs_path.exists():\n        logger.info(\n            f\"Skipping generating async documentation in '{docs_path.resolve()}'\"\n        )\n        return\n\n    _generate_async_docs(\n        spec_path=spec_path,\n        
docs_path=docs_path,\n    )\n"
  },
  {
    "path": "fastkafka/_components/benchmarking.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/017_Benchmarking.ipynb.\n\n# %% auto 0\n__all__ = ['logger']\n\n# %% ../../nbs/017_Benchmarking.ipynb 1\nfrom collections import deque\nfrom datetime import datetime, timedelta\nfrom statistics import mean\nfrom typing import *\n\nfrom .logger import get_logger\n\n# %% ../../nbs/017_Benchmarking.ipynb 4\nlogger = get_logger(\"fastkafka.benchmark\")\n\n# %% ../../nbs/017_Benchmarking.ipynb 5\ndef _benchmark(\n    interval: Union[int, timedelta] = 1,\n    *,\n    sliding_window_size: Optional[int] = None,\n    func_name: str,\n    benchmark_results: Dict[str, Dict[str, Any]],\n) -> None:\n    \"\"\"Used to record the benchmark results(throughput, average throughput, standard deviation) of a given function\n\n    Args:\n        interval: the time interval after which the benchmark results are logged.\n        sliding_window_size: the maximum number of benchmark results to use to calculate average throughput and standard deviation.\n        func_name: the name of the function to be benchmarked.\n        benchmark_results: a dictionary containing the benchmark results of all functions.\n    \"\"\"\n    if isinstance(interval, int):\n        interval = timedelta(seconds=interval)\n    if func_name not in benchmark_results:\n        benchmark_results[func_name] = {\n            \"count\": 0,\n            \"last_count\": 0,\n            \"start\": None,\n            \"last_start\": None,\n            \"history\": [],\n        }\n        if sliding_window_size is not None:\n            benchmark_results[func_name][\"history\"] = deque(maxlen=sliding_window_size)\n\n    benchmark_results[func_name][\"count\"] += 1\n\n    if benchmark_results[func_name][\"count\"] == 1:\n        benchmark_results[func_name][\"start\"] = benchmark_results[func_name][\n            \"last_start\"\n        ] = datetime.utcnow()\n\n    diff = datetime.utcnow() - benchmark_results[func_name][\"last_start\"]\n    if diff >= 
interval:\n        throughput = (\n            benchmark_results[func_name][\"count\"]\n            - benchmark_results[func_name][\"last_count\"]\n        ) / (diff / timedelta(seconds=1))\n        log_msg = f\"Throughput = {throughput:5,.0f}\"\n\n        if sliding_window_size is not None:\n            benchmark_results[func_name][\"history\"].append(throughput)\n\n            log_msg += f\", Avg throughput = {mean(benchmark_results[func_name]['history']):5,.0f}\"\n        #             if len(benchmark_results[func_name][\"history\"]) > 1:\n        #                 log_msg += f\", Standard deviation of throughput is {stdev(benchmark_results[func_name]['history']):5,.0f}\"\n        log_msg = (\n            log_msg\n            + f\" - For {func_name}(interval={interval.seconds},{sliding_window_size=})\"\n        )\n        logger.info(log_msg)\n\n        benchmark_results[func_name][\"last_start\"] = datetime.utcnow()\n        benchmark_results[func_name][\"last_count\"] = benchmark_results[func_name][\n            \"count\"\n        ]\n"
  },
  {
    "path": "fastkafka/_components/docs_dependencies.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/097_Docs_Dependencies.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'npm_required_major_version', 'node_version', 'node_fname_suffix', 'node_fname', 'node_fname_extension',\n           'node_url', 'local_path', 'tgz_path', 'node_path']\n\n# %% ../../nbs/097_Docs_Dependencies.ipynb 2\nimport asyncio\nimport os\nimport platform\nimport shutil\nimport subprocess  # nosec Issue: [B404:blacklist]\nimport tarfile\nimport zipfile\nfrom pathlib import Path\nfrom tempfile import TemporaryDirectory\n\nfrom .helpers import in_notebook\nfrom .logger import get_logger\n\nif in_notebook():\n    from tqdm.notebook import tqdm\nelse:\n    from tqdm import tqdm\n\n# %% ../../nbs/097_Docs_Dependencies.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../nbs/097_Docs_Dependencies.ipynb 5\nnpm_required_major_version = 9\n\n\ndef _check_npm(required_major_version: int = npm_required_major_version) -> None:\n    \"\"\"\n    Check if npm is installed and its major version is compatible with the required version.\n\n    Args:\n        required_major_version: Required major version of npm. Defaults to 9.\n\n    Raises:\n        RuntimeError: If npm is not found or its major version is lower than the required version.\n    \"\"\"\n    if shutil.which(\"npm\") is not None:\n        cmd = \"npm --version\"\n        proc = subprocess.run(  # nosec [B602:subprocess_popen_with_shell_equals_true]\n            cmd,\n            shell=True,\n            check=True,\n            capture_output=True,\n        )\n        major_version = int(proc.stdout.decode(\"UTF-8\").split(\".\")[0])\n        if major_version < required_major_version:\n            raise RuntimeError(\n                f\"Found installed npm major version: {major_version}, required npx major version: {required_major_version}. 
To use documentation features of FastKafka, please update npm\"\n            )\n    else:\n        raise RuntimeError(\n            f\"npm not found, to use documentation generation features of FastKafka, you must have npm >= {required_major_version} installed\"\n        )\n\n# %% ../../nbs/097_Docs_Dependencies.ipynb 10\nnode_version = \"v18.15.0\"\nnode_fname_suffix = \"win-x64\" if platform.system() == \"Windows\" else \"linux-x64\"\nnode_fname = f\"node-{node_version}-{node_fname_suffix}\"\nnode_fname_extension = \".zip\" if platform.system() == \"Windows\" else \".tar.xz\"\nnode_url = f\"https://nodejs.org/dist/{node_version}/{node_fname}{node_fname_extension}\"\nlocal_path = (\n    Path(os.path.expanduser(\"~\")).parent / \"Public\"\n    if platform.system() == \"Windows\"\n    else Path(os.path.expanduser(\"~\")) / \".local\"\n)\ntgz_path = local_path / f\"{node_fname}{node_fname_extension}\"\nnode_path = local_path / f\"{node_fname}\"\n\n\ndef _check_npm_with_local(node_path: Path = node_path) -> None:\n    \"\"\"\n    Check if npm is installed and its major version is compatible with the required version.\n    If npm is not found but a local installation of NodeJS is available, add the NodeJS binary path to the system's PATH environment variable.\n\n    Args:\n        node_path: Path to the local installation of NodeJS. 
Defaults to node_path.\n\n    Raises:\n        RuntimeError: If npm is not found and a local installation of NodeJS is not available.\n    \"\"\"\n    try:\n        _check_npm()\n    except RuntimeError as e:\n        if (node_path).exists():\n            logger.info(\"Found local installation of NodeJS.\")\n            node_binary_path = (\n                f\";{node_path}\"\n                if platform.system() == \"Windows\"\n                else f\":{node_path / 'bin'}\"\n            )\n            os.environ[\"PATH\"] = os.environ[\"PATH\"] + node_binary_path\n            _check_npm()\n        else:\n            raise e\n\n# %% ../../nbs/097_Docs_Dependencies.ipynb 13\ndef _install_node(\n    *,\n    node_url: str = node_url,\n    local_path: Path = local_path,\n    tgz_path: Path = tgz_path,\n) -> None:\n    \"\"\"\n    Install NodeJS by downloading the NodeJS distribution archive, extracting it, and adding the NodeJS binary path to the system's PATH environment variable.\n\n    Args:\n        node_url: URL of the NodeJS distribution archive to download. Defaults to node_url.\n        local_path: Path to store the downloaded distribution archive. Defaults to local_path.\n        tgz_path: Path of the downloaded distribution archive. 
Defaults to tgz_path.\n    \"\"\"\n    try:\n        import requests\n    except Exception as e:\n        msg = \"Please install docs version of fastkafka using 'pip install fastkafka[docs]' command\"\n        logger.error(msg)\n        raise RuntimeError(msg)\n\n    logger.info(\"Installing NodeJS...\")\n    local_path.mkdir(exist_ok=True, parents=True)\n    response = requests.get(\n        node_url,\n        stream=True,\n        timeout=60,\n    )\n    try:\n        total = response.raw.length_remaining // 128\n    except Exception:\n        total = None\n\n    with open(tgz_path, \"wb\") as f:\n        for data in tqdm(response.iter_content(chunk_size=128), total=total):\n            f.write(data)\n\n    if platform.system() == \"Windows\":\n        with zipfile.ZipFile(tgz_path, \"r\") as zip_ref:\n            zip_ref.extractall(\n                local_path\n            )  # nosec: B202 tarfile_unsafe_members - tarfile.extractall used without any validation. Please check and discard dangerous members.\n    else:\n        with tarfile.open(tgz_path) as tar:\n            for tarinfo in tar:\n                tar.extract(tarinfo, local_path)\n\n    os.environ[\"PATH\"] = (\n        os.environ[\"PATH\"] + f\";{node_path}\"\n        if platform.system() == \"Windows\"\n        else f\":{node_path}/bin\"\n    )\n    logger.info(f\"Node installed in {node_path}.\")\n\n# %% ../../nbs/097_Docs_Dependencies.ipynb 16\nasync def _install_docs_npm_deps() -> None:\n    \"\"\"\n    Install the required npm dependencies for generating the documentation using AsyncAPI generator.\n    \"\"\"\n    with TemporaryDirectory() as d:\n        cmd = (\n            \"npx -y -p @asyncapi/generator ag https://raw.githubusercontent.com/asyncapi/asyncapi/master/examples/simple.yml @asyncapi/html-template -o \"\n            + d\n        )\n\n        proc = await asyncio.create_subprocess_shell(\n            cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE\n        )\n     
   stdout, stderr = await proc.communicate()\n\n        if proc.returncode == 0:\n            logger.info(\"AsyncAPI generator installed\")\n        else:\n            logger.error(\"AsyncAPI generator NOT installed!\")\n            logger.info(\n                f\"stdout of '$ {cmd}'{stdout.decode('UTF-8')} \\n return_code={proc.returncode}\"\n            )\n            logger.info(\n                f\"stderr of '$ {cmd}'{stderr.decode('UTF-8')} \\n return_code={proc.returncode}\"\n            )\n            raise ValueError(\n                f\"\"\"AsyncAPI generator NOT installed, used '$ {cmd}'\n----------------------------------------\nstdout:\n{stdout.decode(\"UTF-8\")}\n----------------------------------------\nstderr:\n{stderr.decode(\"UTF-8\")}\n----------------------------------------\nreturn_code={proc.returncode}\"\"\"\n            )\n"
  },
  {
    "path": "fastkafka/_components/encoder/__init__.py",
    "content": ""
  },
  {
    "path": "fastkafka/_components/encoder/avro.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../nbs/018_Avro_Encode_Decoder.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'AvroBase', 'avro_encoder', 'avro_decoder', 'avsc_to_pydantic']\n\n# %% ../../../nbs/018_Avro_Encode_Decoder.ipynb 1\nimport io\nimport json\nfrom typing import *\n\nimport fastavro\nfrom pydantic import BaseModel, create_model\n\nfrom ..logger import get_logger\nfrom ..meta import export\n\n# %% ../../../nbs/018_Avro_Encode_Decoder.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../../nbs/018_Avro_Encode_Decoder.ipynb 7\n@export(\"fastkafka.encoder\")\nclass AvroBase(BaseModel):\n    \"\"\"This is base pydantic class that will add some methods\"\"\"\n\n    @classmethod\n    def avro_schema_for_pydantic_object(\n        cls,\n        pydantic_model: BaseModel,\n        by_alias: bool = True,\n        namespace: Optional[str] = None,\n    ) -> Dict[str, Any]:\n        \"\"\"\n        Returns the Avro schema for the given Pydantic object.\n\n        Args:\n            pydantic_model (BaseModel): The Pydantic object.\n            by_alias (bool, optional): Generate schemas using aliases defined. 
Defaults to True.\n            namespace (Optional[str], optional): Optional namespace string for schema generation.\n\n        Returns:\n            Dict[str, Any]: The Avro schema for the model.\n        \"\"\"\n\n        schema = pydantic_model.__class__.model_json_schema(by_alias=by_alias)\n\n        if namespace is None:\n            # default namespace will be based on title\n            namespace = schema[\"title\"]\n\n        return cls._avro_schema(schema, namespace)\n\n    @classmethod\n    def avro_schema_for_pydantic_class(\n        cls,\n        pydantic_model: Type[BaseModel],\n        by_alias: bool = True,\n        namespace: Optional[str] = None,\n    ) -> Dict[str, Any]:\n        \"\"\"\n        Returns the Avro schema for the given Pydantic class.\n\n        Args:\n            pydantic_model (Type[BaseModel]): The Pydantic class.\n            by_alias (bool, optional): Generate schemas using aliases defined. Defaults to True.\n            namespace (Optional[str], optional): Optional namespace string for schema generation.\n\n        Returns:\n            Dict[str, Any]: The Avro schema for the model.\n        \"\"\"\n\n        schema = pydantic_model.model_json_schema(by_alias=by_alias)\n\n        if namespace is None:\n            # default namespace will be based on title\n            namespace = schema[\"title\"]\n\n        return cls._avro_schema(schema, namespace)\n\n    @classmethod\n    def avro_schema(\n        cls, by_alias: bool = True, namespace: Optional[str] = None\n    ) -> Dict[str, Any]:\n        \"\"\"\n        Returns the Avro schema for the Pydantic class.\n\n        Args:\n            by_alias (bool, optional): Generate schemas using aliases defined. 
Defaults to True.\n            namespace (Optional[str], optional): Optional namespace string for schema generation.\n\n        Returns:\n            Dict[str, Any]: The Avro schema for the model.\n        \"\"\"\n        schema = cls.schema(by_alias=by_alias)\n\n        if namespace is None:\n            # default namespace will be based on title\n            namespace = schema[\"title\"]\n\n        return cls._avro_schema(schema, namespace)\n\n    @staticmethod\n    def _avro_schema(schema: Dict[str, Any], namespace: str) -> Dict[str, Any]:\n        \"\"\"Return the avro schema for the given pydantic schema\"\"\"\n\n        classes_seen = set()\n\n        def get_definition(ref: str, schema: Dict[str, Any]) -> Dict[str, Any]:\n            \"\"\"Reading definition of base schema for nested structs\"\"\"\n            id = ref.replace(\"#/definitions/\", \"\")\n            d = schema.get(\"definitions\", {}).get(id)\n            if d is None:\n                raise RuntimeError(f\"Definition {id} does not exist\")\n            return d  # type: ignore\n\n        def get_type(value: Dict[str, Any]) -> Dict[str, Any]:\n            \"\"\"Returns a type of a single field\"\"\"\n            t = value.get(\"type\")\n            f = value.get(\"format\")\n            r = value.get(\"$ref\")\n            a = value.get(\"additionalProperties\")\n            avro_type_dict: Dict[str, Any] = {}\n            if \"default\" in value:\n                avro_type_dict[\"default\"] = value.get(\"default\")\n            if \"description\" in value:\n                avro_type_dict[\"doc\"] = value.get(\"description\")\n            if \"allOf\" in value and len(value[\"allOf\"]) == 1:\n                r = value[\"allOf\"][0][\"$ref\"]\n            if r is not None:\n                class_name = r.replace(\"#/definitions/\", \"\")\n                if class_name in classes_seen:\n                    avro_type_dict[\"type\"] = class_name\n                else:\n                    d = 
get_definition(r, schema)\n                    if \"enum\" in d:\n                        avro_type_dict[\"type\"] = {\n                            \"type\": \"enum\",\n                            \"symbols\": [str(v) for v in d[\"enum\"]],\n                            \"name\": d[\"title\"],\n                        }\n                    else:\n                        avro_type_dict[\"type\"] = {\n                            \"type\": \"record\",\n                            \"fields\": get_fields(d),\n                            # Name of the struct should be unique true the complete schema\n                            # Because of this the path in the schema is tracked and used as name for a nested struct/array\n                            \"name\": class_name,\n                        }\n                    classes_seen.add(class_name)\n            elif t == \"array\":\n                items = value.get(\"items\")\n                tn = get_type(items)  # type: ignore\n                # If items in array are a object:\n                if \"$ref\" in items:  # type: ignore\n                    tn = tn[\"type\"]\n                # If items in array are a logicalType\n                if (\n                    isinstance(tn, dict)\n                    and isinstance(tn.get(\"type\", {}), dict)\n                    and tn.get(\"type\", {}).get(\"logicalType\") is not None\n                ):\n                    tn = tn[\"type\"]\n                avro_type_dict[\"type\"] = {\"type\": \"array\", \"items\": tn}\n            elif t == \"string\" and f == \"date-time\":\n                avro_type_dict[\"type\"] = {\n                    \"type\": \"long\",\n                    \"logicalType\": \"timestamp-micros\",\n                }\n            elif t == \"string\" and f == \"date\":\n                avro_type_dict[\"type\"] = {\n                    \"type\": \"int\",\n                    \"logicalType\": \"date\",\n                }\n            elif t == \"string\" 
and f == \"time\":\n                avro_type_dict[\"type\"] = {\n                    \"type\": \"long\",\n                    \"logicalType\": \"time-micros\",\n                }\n            elif t == \"string\" and f == \"uuid\":\n                avro_type_dict[\"type\"] = {\n                    \"type\": \"string\",\n                    \"logicalType\": \"uuid\",\n                }\n            elif t == \"string\":\n                avro_type_dict[\"type\"] = \"string\"\n            elif t == \"number\":\n                avro_type_dict[\"type\"] = \"double\"\n            elif t == \"integer\":\n                # integer in python can be a long\n                avro_type_dict[\"type\"] = \"long\"\n            elif t == \"boolean\":\n                avro_type_dict[\"type\"] = \"boolean\"\n            elif t == \"object\":\n                if a is None:\n                    value_type = \"string\"\n                else:\n                    value_type = get_type(a)  # type: ignore\n                if isinstance(value_type, dict) and len(value_type) == 1:\n                    value_type = value_type.get(\"type\")  # type: ignore\n                avro_type_dict[\"type\"] = {\"type\": \"map\", \"values\": value_type}\n            else:\n                raise NotImplementedError(\n                    f\"Type '{t}' not support yet, \"\n                    f\"please report this at https://github.com/godatadriven/pydantic-avro/issues\"\n                )\n            return avro_type_dict\n\n        def get_fields(s: Dict[str, Any]) -> List[Dict[str, Any]]:\n            \"\"\"Return a list of fields of a struct\"\"\"\n            fields = []\n\n            required = s.get(\"required\", [])\n            for key, value in s.get(\"properties\", {}).items():\n                if \"type\" not in value and \"anyOf\" in value:\n                    any_of_types = value.pop(\"anyOf\")\n                    types = [x[\"type\"] for x in any_of_types if x[\"type\"] != \"null\"]\n    
                value[\"type\"] = types[0]\n                avro_type_dict = get_type(value)\n                avro_type_dict[\"name\"] = key\n\n                if key not in required:\n                    if avro_type_dict.get(\"default\") is None:\n                        avro_type_dict[\"type\"] = [\"null\", avro_type_dict[\"type\"]]\n                        avro_type_dict[\"default\"] = None\n\n                fields.append(avro_type_dict)\n            return fields\n\n        fields = get_fields(schema)\n\n        return {\n            \"type\": \"record\",\n            \"namespace\": namespace,\n            \"name\": schema[\"title\"],\n            \"fields\": fields,\n        }\n\n# %% ../../../nbs/018_Avro_Encode_Decoder.ipynb 11\n@export(\"fastkafka.encoder\")\ndef avro_encoder(msg: BaseModel) -> bytes:\n    \"\"\"\n    Encoder to encode pydantic instances to avro message\n\n    Args:\n        msg: An instance of pydantic basemodel\n\n    Returns:\n        A bytes message which is encoded from pydantic basemodel\n    \"\"\"\n    schema = fastavro.schema.parse_schema(AvroBase.avro_schema_for_pydantic_object(msg))\n    bytes_writer = io.BytesIO()\n\n    d = msg.model_dump()\n    for k, v in d.items():\n        if \"pydantic_core\" in str(type(v)):\n            d[k] = str(v)\n\n    fastavro.schemaless_writer(bytes_writer, schema, d)\n    raw_bytes = bytes_writer.getvalue()\n    return raw_bytes\n\n# %% ../../../nbs/018_Avro_Encode_Decoder.ipynb 13\n@export(\"fastkafka.encoder\")\ndef avro_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n    \"\"\"\n    Decoder to decode avro encoded messages to pydantic model instance\n\n    Args:\n        raw_msg: Avro encoded bytes message received from Kafka topic\n        cls: Pydantic class; This pydantic class will be used to construct instance of same class\n\n    Returns:\n        An instance of given pydantic class\n    \"\"\"\n    schema = 
fastavro.schema.parse_schema(AvroBase.avro_schema_for_pydantic_class(cls))\n\n    bytes_reader = io.BytesIO(raw_msg)\n    msg_dict = fastavro.schemaless_reader(bytes_reader, schema)\n\n    return cls(**msg_dict)\n\n# %% ../../../nbs/018_Avro_Encode_Decoder.ipynb 16\n@export(\"fastkafka.encoder\")\ndef avsc_to_pydantic(schema: Dict[str, Any]) -> Type[BaseModel]:\n    \"\"\"\n    Generate pydantic model from given Avro Schema\n\n    Args:\n        schema: Avro schema in dictionary format\n\n    Returns:\n        Pydantic model class built from given avro schema\n    \"\"\"\n    if \"type\" not in schema or schema[\"type\"] != \"record\":\n        raise AttributeError(\"Type not supported\")\n    if \"name\" not in schema:\n        raise AttributeError(\"Name is required\")\n    if \"fields\" not in schema:\n        raise AttributeError(\"fields are required\")\n\n    classes = {}\n\n    def get_python_type(t: Union[str, Dict[str, Any]]) -> str:\n        \"\"\"Returns python type for given avro type\"\"\"\n        optional = False\n        if isinstance(t, str):\n            if t == \"string\":\n                py_type = \"str\"\n            elif t == \"long\" or t == \"int\":\n                py_type = \"int\"\n            elif t == \"boolean\":\n                py_type = \"bool\"\n            elif t == \"double\" or t == \"float\":\n                py_type = \"float\"\n            elif t in classes:\n                py_type = t\n            else:\n                raise NotImplementedError(f\"Type {t} not supported yet\")\n        elif isinstance(t, list):\n            if \"null\" in t:\n                optional = True\n            if len(t) > 2 or (not optional and len(t) > 1):\n                raise NotImplementedError(\"Only a single type ia supported yet\")\n            c = t.copy()\n            c.remove(\"null\")\n            py_type = get_python_type(c[0])\n        elif t.get(\"logicalType\") == \"uuid\":\n            py_type = \"UUID\"\n        elif 
t.get(\"logicalType\") == \"decimal\":\n            py_type = \"Decimal\"\n        elif (\n            t.get(\"logicalType\") == \"timestamp-millis\"\n            or t.get(\"logicalType\") == \"timestamp-micros\"\n        ):\n            py_type = \"datetime\"\n        elif (\n            t.get(\"logicalType\") == \"time-millis\"\n            or t.get(\"logicalType\") == \"time-micros\"\n        ):\n            py_type = \"time\"\n        elif t.get(\"logicalType\") == \"date\":\n            py_type = \"date\"\n        elif t.get(\"type\") == \"enum\":\n            enum_name = t.get(\"name\")\n            if enum_name not in classes:\n                enum_class = f\"class {enum_name}(str, Enum):\\n\"\n                for s in t.get(\"symbols\"):  # type: ignore\n                    enum_class += f'    {s} = \"{s}\"\\n'\n                classes[enum_name] = enum_class\n            py_type = enum_name  # type: ignore\n        elif t.get(\"type\") == \"string\":\n            py_type = \"str\"\n        elif t.get(\"type\") == \"array\":\n            sub_type = get_python_type(t.get(\"items\"))  # type: ignore\n            py_type = f\"List[{sub_type}]\"\n        elif t.get(\"type\") == \"record\":\n            record_type_to_pydantic(t)\n            py_type = t.get(\"name\")  # type: ignore\n        elif t.get(\"type\") == \"map\":\n            value_type = get_python_type(t.get(\"values\"))  # type: ignore\n            py_type = f\"Dict[str, {value_type}]\"\n        else:\n            raise NotImplementedError(\n                f\"Type {t} not supported yet, \"\n                f\"please report this at https://github.com/godatadriven/pydantic-avro/issues\"\n            )\n        if optional:\n            return f\"Optional[{py_type}]\"\n        else:\n            return py_type\n\n    def record_type_to_pydantic(schema: Dict[str, Any]) -> Type[BaseModel]:\n        \"\"\"Convert a single avro record type to a pydantic class\"\"\"\n        name = (\n            
schema[\"name\"]\n            if \".\" not in schema[\"name\"]\n            else schema[\"name\"].split(\".\")[-1]\n        )\n        current = f\"class {schema['name']}(BaseModel):\\n\"\n\n        kwargs: Dict[str, Tuple[str, Any]] = {}\n\n        if len(schema[\"fields\"]) == 0:\n            raise ValueError(\"Avro schema has no fields\")\n\n        for field in schema[\"fields\"]:\n            n = field[\"name\"]\n            t = get_python_type(field[\"type\"])\n            default = field.get(\"default\")\n            if \"default\" not in field:\n                kwargs[n] = (t, ...)\n                current += f\"    {n}: {t}\\n\"\n            elif isinstance(default, (bool, type(None))):\n                kwargs[n] = (t, default)\n                current += f\"    {n}: {t} = {default}\\n\"\n            else:\n                kwargs[n] = (t, default)\n                current += f\"    {n}: {t} = {json.dumps(default)}\\n\"\n\n        classes[name] = current\n        pydantic_model = create_model(name, __module__=__name__, **kwargs)  # type: ignore\n        return pydantic_model  # type: ignore\n\n    return record_type_to_pydantic(schema)\n"
  },
  {
    "path": "fastkafka/_components/encoder/json.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../nbs/019_Json_Encode_Decoder.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'json_encoder', 'json_decoder']\n\n# %% ../../../nbs/019_Json_Encode_Decoder.ipynb 1\nimport json\nfrom typing import *\n\nfrom pydantic import BaseModel\n\nfrom ..logger import get_logger\nfrom ..meta import export\n\n# %% ../../../nbs/019_Json_Encode_Decoder.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../../nbs/019_Json_Encode_Decoder.ipynb 6\ndef _to_json_utf8(o: Any) -> bytes:\n    \"\"\"Converts to JSON and then encodes with UTF-8\"\"\"\n    if hasattr(o, \"model_dump_json\"):\n        return o.model_dump_json().encode(\"utf-8\")  # type: ignore\n    else:\n        return json.dumps(o).encode(\"utf-8\")\n\n# %% ../../../nbs/019_Json_Encode_Decoder.ipynb 9\n@export(\"fastkafka.encoder\")\ndef json_encoder(msg: BaseModel) -> bytes:\n    \"\"\"\n    Encoder to encode pydantic instances to json string\n\n    Args:\n        msg: An instance of pydantic basemodel\n\n    Returns:\n        Json string in bytes which is encoded from pydantic basemodel\n    \"\"\"\n    return _to_json_utf8(msg)\n\n# %% ../../../nbs/019_Json_Encode_Decoder.ipynb 11\n@export(\"fastkafka.encoder\")\ndef json_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n    \"\"\"\n    Decoder to decode json string in bytes to pydantic model instance\n\n    Args:\n        raw_msg: Bytes message received from Kafka topic\n        cls: Pydantic class; This pydantic class will be used to construct instance of same class\n\n    Returns:\n        An instance of given pydantic class\n    \"\"\"\n    msg_dict = json.loads(raw_msg.decode(\"utf-8\"))\n\n    return cls(**msg_dict)\n"
  },
  {
    "path": "fastkafka/_components/helpers.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/998_Internal_Helpers.ipynb.\n\n# %% auto 0\n__all__ = ['in_notebook', 'change_dir', 'ImportFromStringError', 'true_after', 'unwrap_list_type', 'remove_suffix']\n\n# %% ../../nbs/998_Internal_Helpers.ipynb 2\ndef in_notebook() -> bool:\n    \"\"\"\n    Checks if the code is running in a Jupyter notebook or not.\n\n    Returns:\n        True if running in a Jupyter notebook, False otherwise.\n    \"\"\"\n    try:\n        from IPython import get_ipython\n\n        if \"IPKernelApp\" not in get_ipython().config:\n            return False\n    except ImportError:\n        return False\n    except AttributeError:\n        return False\n    return True\n\n# %% ../../nbs/998_Internal_Helpers.ipynb 4\nimport contextlib\nimport importlib\nimport os\nimport sys\nfrom datetime import datetime, timedelta\nfrom inspect import Parameter\nfrom typing import *\n\nimport typer\n\n# %% ../../nbs/998_Internal_Helpers.ipynb 6\n@contextlib.contextmanager\ndef change_dir(d: str) -> Generator[None, None, None]:\n    \"\"\"\n    Changes the current working directory temporarily.\n\n    Args:\n        d: The directory to change to.\n\n    Yields:\n        None.\n    \"\"\"\n    curdir = os.getcwd()\n    os.chdir(d)\n    try:\n        yield\n    finally:\n        os.chdir(curdir)\n\n# %% ../../nbs/998_Internal_Helpers.ipynb 8\nclass ImportFromStringError(Exception):\n    pass\n\n\ndef _import_from_string(import_str: str) -> Any:\n    \"\"\"Imports library from string\n\n    Note:\n        copied from https://github.com/encode/uvicorn/blob/master/uvicorn/importer.py\n\n    Args:\n        import_str: input string in form 'main:app'\n\n    \"\"\"\n    sys.path.append(\".\")\n\n    if not isinstance(import_str, str):\n        return import_str\n\n    module_str, _, attrs_str = import_str.partition(\":\")\n    if not module_str or not attrs_str:\n        message = (\n            'Import string \"{import_str}\" must be in 
format \"<module>:<attribute>\".'\n        )\n        typer.secho(f\"{message}\", err=True, fg=typer.colors.RED)\n        raise ImportFromStringError(message.format(import_str=import_str))\n\n    try:\n        # nosemgrep: python.lang.security.audit.non-literal-import.non-literal-import\n        module = importlib.import_module(module_str)\n    except ImportError as exc:\n        if exc.name != module_str:\n            raise exc from None\n        message = 'Could not import module \"{module_str}\".'\n        raise ImportFromStringError(message.format(module_str=module_str))\n\n    instance = module\n    try:\n        for attr_str in attrs_str.split(\".\"):\n            instance = getattr(instance, attr_str)\n    except AttributeError:\n        message = 'Attribute \"{attrs_str}\" not found in module \"{module_str}\".'\n        raise ImportFromStringError(\n            message.format(attrs_str=attrs_str, module_str=module_str)\n        )\n\n    return instance\n\n# %% ../../nbs/998_Internal_Helpers.ipynb 10\ndef true_after(seconds: Union[int, float]) -> Callable[[], bool]:\n    \"\"\"Function returning True after a given number of seconds\"\"\"\n    t = datetime.now()\n\n    def _true_after(seconds: Union[int, float] = seconds, t: datetime = t) -> bool:\n        return (datetime.now() - t) > timedelta(seconds=seconds)\n\n    return _true_after\n\n# %% ../../nbs/998_Internal_Helpers.ipynb 12\ndef unwrap_list_type(var_type: Union[Type, Parameter]) -> Union[Type, Parameter]:\n    \"\"\"\n    Unwraps the type of a list.\n\n    Vars:\n        var_type: Type to unwrap.\n\n    Returns:\n        Unwrapped type if the given type is a list, otherwise returns the same type.\n\n    Example:\n        - Input: List[str]\n          Output: str\n        - Input: int\n          Output: int\n    \"\"\"\n    if hasattr(var_type, \"__origin__\") and var_type.__origin__ == list:\n        return var_type.__args__[0]  # type: ignore\n    else:\n        return var_type\n\n# %% 
../../nbs/998_Internal_Helpers.ipynb 14\ndef remove_suffix(topic: str) -> str:\n    \"\"\"\n    Removes the suffix from a string by splitting on underscores and joining all but the last element.\n\n    Args:\n        topic: The string to remove the suffix from.\n\n    Returns:\n        The string without the suffix.\n    \"\"\"\n    return \"_\".join(topic.split(\"_\")[:-1])\n"
  },
  {
    "path": "fastkafka/_components/logger.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/Logger.ipynb.\n\n# %% auto 0\n__all__ = ['should_suppress_timestamps', 'logger_spaces_added', 'suppress_timestamps', 'get_default_logger_configuration',\n           'get_logger', 'set_level', 'cached_log']\n\n# %% ../../nbs/Logger.ipynb 2\nimport logging\nimport logging.config\nfrom typing import *\n\nfrom .helpers import true_after\n\n# %% ../../nbs/Logger.ipynb 4\n# Logger Levels\n# CRITICAL = 50\n# ERROR = 40\n# WARNING = 30\n# INFO = 20\n# DEBUG = 10\n# NOTSET = 0\n\nshould_suppress_timestamps: bool = False\n\n\ndef suppress_timestamps(flag: bool = True) -> None:\n    \"\"\"Suppress logger timestamp\n\n    Args:\n        flag: If not set, then the default value **True** will be used to suppress the timestamp\n            from the logger messages\n    \"\"\"\n    global should_suppress_timestamps\n    should_suppress_timestamps = flag\n\n\ndef get_default_logger_configuration(level: int = logging.INFO) -> Dict[str, Any]:\n    \"\"\"Return the common configurations for the logger\n\n    Args:\n        level: Logger level to set\n\n    Returns:\n        A dict with default logger configuration\n\n    \"\"\"\n    global should_suppress_timestamps\n\n    if should_suppress_timestamps:\n        FORMAT = \"[%(levelname)s] %(name)s: %(message)s\"\n    else:\n        FORMAT = \"%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s\"\n\n    DATE_FMT = \"%y-%m-%d %H:%M:%S\"\n\n    LOGGING_CONFIG = {\n        \"version\": 1,\n        \"disable_existing_loggers\": False,\n        \"formatters\": {\n            \"standard\": {\"format\": FORMAT, \"datefmt\": DATE_FMT},\n        },\n        \"handlers\": {\n            \"default\": {\n                \"level\": level,\n                \"formatter\": \"standard\",\n                \"class\": \"logging.StreamHandler\",\n                \"stream\": \"ext://sys.stdout\",  # Default is stderr\n            },\n        },\n        \"loggers\": {\n          
  \"\": {\"handlers\": [\"default\"], \"level\": level},  # root logger\n        },\n    }\n    return LOGGING_CONFIG\n\n# %% ../../nbs/Logger.ipynb 8\nlogger_spaces_added: List[str] = []\n\n\ndef get_logger(\n    name: str, *, level: int = logging.INFO, add_spaces: bool = True\n) -> logging.Logger:\n    \"\"\"Return the logger class with default logging configuration.\n\n    Args:\n        name: Pass the __name__ variable as name while calling\n        level: Used to configure logging, default value `logging.INFO` logs\n            info messages and up.\n        add_spaces:\n\n    Returns:\n        The logging.Logger class with default/custom logging configuration\n\n    \"\"\"\n    config = get_default_logger_configuration(level=level)\n    logging.config.dictConfig(config)\n\n    logger = logging.getLogger(name)\n    #     stack_size = len(traceback.extract_stack())\n    #     def add_spaces_f(f):\n    #         def f_with_spaces(msg, *args, **kwargs):\n    #             cur_stack_size = len(traceback.extract_stack())\n    #             msg = \" \"*(cur_stack_size-stack_size)*2 + msg\n    #             return f(msg, *args, **kwargs)\n    #         return f_with_spaces\n\n    #     if name not in logger_spaces_added and add_spaces:\n    #         logger.debug = add_spaces_f(logger.debug) # type: ignore\n    #         logger.info = add_spaces_f(logger.info) # type: ignore\n    #         logger.warning = add_spaces_f(logger.warning) # type: ignore\n    #         logger.error = add_spaces_f(logger.error) # type: ignore\n    #         logger.critical = add_spaces_f(logger.critical) # type: ignore\n    #         logger.exception = add_spaces_f(logger.exception) # type: ignore\n\n    #         logger_spaces_added.append(name)\n\n    return logger\n\n# %% ../../nbs/Logger.ipynb 14\ndef set_level(level: int) -> None:\n    \"\"\"Set logger level\n\n    Args:\n        level: Logger level to set\n    \"\"\"\n\n    # Getting all loggers that has either fastkafka or __main__ 
in the name\n    loggers = [\n        logging.getLogger(name)\n        for name in logging.root.manager.loggerDict\n        if (\"fastkafka\" in name) or (\"__main__\" in name)\n    ]\n\n    for logger in loggers:\n        logger.setLevel(level)\n\n# %% ../../nbs/Logger.ipynb 18\ndef cached_log(\n    self: logging.Logger,\n    msg: str,\n    level: int,\n    timeout: Union[int, float] = 5,\n    log_id: Optional[str] = None,\n) -> None:\n    \"\"\"\n    Logs a message with a specified level only once within a given timeout.\n\n    Args:\n        self: The logger instance.\n        msg: The message to log.\n        level: The logging level for the message.\n        timeout: The timeout duration in seconds.\n        log_id: Id of the log to timeout for timeout time, if None, msg will be used as log_id\n\n    Returns:\n        None\n    \"\"\"\n    if not hasattr(self, \"_timeouted_msgs\"):\n        self._timeouted_msgs = {}  # type: ignore\n\n    key = msg if log_id is None else log_id\n\n    if msg not in self._timeouted_msgs or self._timeouted_msgs[key]():  # type: ignore\n        self._timeouted_msgs[key] = true_after(timeout)  # type: ignore\n\n        self.log(level, msg)\n"
  },
  {
    "path": "fastkafka/_components/meta.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/096_Meta.ipynb.\n\n# %% auto 0\n__all__ = ['TorF', 'T', 'patch', 'combine_params', 'delegates', 'use_parameters_of', 'filter_using_signature', 'export',\n           'classcontextmanager']\n\n# %% ../../nbs/096_Meta.ipynb 1\nimport builtins\nimport copy as cp\nimport functools\nimport inspect\nimport sys\nimport types\nfrom functools import partial, wraps\nfrom types import *\nfrom typing import *\n\nimport docstring_parser\n\n# %% ../../nbs/096_Meta.ipynb 4\ndef test_eq(a: Any, b: Any) -> None:\n    \"`test` that `a==b`\"\n    if a != b:\n        raise ValueError(f\"{a} != {b}\")\n\n# %% ../../nbs/096_Meta.ipynb 6\nF = TypeVar(\"F\", bound=Callable[..., Any])\n\n\ndef copy_func(f: Union[F, FunctionType]) -> Union[F, FunctionType]:\n    \"Copy a non-builtin function (NB `copy.copy` does not work for this)\"\n    if not isinstance(f, FunctionType):\n        return cp.copy(f)\n    fn = FunctionType(\n        f.__code__, f.__globals__, f.__name__, f.__defaults__, f.__closure__\n    )\n    fn.__kwdefaults__ = f.__kwdefaults__\n    fn.__dict__.update(f.__dict__)\n    fn.__annotations__.update(f.__annotations__)\n    fn.__qualname__ = f.__qualname__\n    fn.__doc__ = f.__doc__\n    return fn\n\n# %% ../../nbs/096_Meta.ipynb 11\ndef patch_to(\n    cls: Union[Type, Iterable[Type]], as_prop: bool = False, cls_method: bool = False\n) -> Callable[[F], F]:\n    \"Decorator: add `f` to `cls`\"\n    if not isinstance(cls, (tuple, list)):\n        cls = (cls,)  # type: ignore\n\n    def _inner(f: F) -> F:\n        for c_ in cls:\n            nf = copy_func(f)\n            nm = f.__name__\n            # `functools.update_wrapper` when passing patched function to `Pipeline`, so we do it manually\n            for o in functools.WRAPPER_ASSIGNMENTS:\n                setattr(nf, o, getattr(f, o))\n            nf.__qualname__ = f\"{c_.__name__}.{nm}\"\n            if cls_method:\n                setattr(c_, nm, 
MethodType(nf, c_))\n            else:\n                setattr(c_, nm, property(nf) if as_prop else nf)\n        # Avoid clobbering existing functions\n        # nosemgrep\n        existing_func = globals().get(nm, builtins.__dict__.get(nm, None))\n        return existing_func  # type: ignore\n\n    return _inner\n\n# %% ../../nbs/096_Meta.ipynb 22\ndef eval_type(\n    t: Sequence, glb: Optional[Dict[str, Any]], loc: Optional[Mapping[str, object]]\n) -> Any:\n    \"`eval` a type or collection of types, if needed, for annotations in py3.10+\"\n    if isinstance(t, str):\n        if \"|\" in t:\n            return Union[eval_type(tuple(t.split(\"|\")), glb, loc)]\n        # nosemgrep\n        return eval(t, glb, loc)  # nosec B307:blacklist\n    if isinstance(t, (tuple, list)):\n        return type(t)([eval_type(c, glb, loc) for c in t])\n    return t\n\n\ndef union2tuple(t) -> Tuple[Any, ...]:  # type: ignore\n    if getattr(t, \"__origin__\", None) is Union:\n        return t.__args__  # type: ignore\n\n    if sys.version_info >= (3, 10):\n        if isinstance(t, UnionType):\n            return t.__args__\n\n    return t  # type: ignore\n\n\ndef get_annotations_ex(\n    obj: Union[FunctionType, Type, F],\n    *,\n    globals: Optional[Dict[str, Any]] = None,\n    locals: Optional[Dict[str, Any]] = None,\n) -> Tuple[Dict[str, Any], Union[Any, Dict[str, Any], None], Dict[str, Any]]:\n    \"Backport of py3.10 `get_annotations` that returns globals/locals\"\n    if isinstance(obj, type):\n        obj_dict = getattr(obj, \"__dict__\", None)\n        if obj_dict and hasattr(obj_dict, \"get\"):\n            ann = obj_dict.get(\"__annotations__\", None)\n            if isinstance(ann, types.GetSetDescriptorType):\n                ann = None\n        else:\n            ann = None\n\n        obj_globals = None\n        module_name = getattr(obj, \"__module__\", None)\n        if module_name:\n            module = sys.modules.get(module_name, None)\n            if module:\n 
               obj_globals = getattr(module, \"__dict__\", None)\n        obj_locals = dict(vars(obj))\n        unwrap = obj\n    elif isinstance(obj, types.ModuleType):\n        ann = getattr(obj, \"__annotations__\", None)\n        obj_globals = getattr(obj, \"__dict__\")\n        obj_locals, unwrap = None, None\n    elif callable(obj):\n        ann = getattr(obj, \"__annotations__\", None)\n        obj_globals = getattr(obj, \"__globals__\", None)\n        obj_locals, unwrap = None, obj  # type: ignore\n    else:\n        raise TypeError(f\"{obj!r} is not a module, class, or callable.\")\n\n    if ann is None:\n        ann = {}\n    if not isinstance(ann, dict):\n        raise ValueError(f\"{obj!r}.__annotations__ is neither a dict nor None\")\n    if not ann:\n        ann = {}\n\n    if unwrap is not None:\n        while True:\n            if hasattr(unwrap, \"__wrapped__\"):\n                unwrap = unwrap.__wrapped__\n                continue\n            if isinstance(unwrap, functools.partial):\n                unwrap = unwrap.func  # type: ignore\n                continue\n            break\n        if hasattr(unwrap, \"__globals__\"):\n            obj_globals = unwrap.__globals__\n\n    if globals is None:\n        globals = obj_globals\n    if locals is None:\n        locals = obj_locals\n\n    return dict(ann), globals, locals  # type: ignore\n\n# %% ../../nbs/096_Meta.ipynb 23\ndef patch(  # type: ignore\n    f: Optional[F] = None, *, as_prop: bool = False, cls_method: bool = False\n):\n    \"Decorator: add `f` to the first parameter's class (based on f's type annotations)\"\n    if f is None:\n        return partial(patch, as_prop=as_prop, cls_method=cls_method)\n    ann, glb, loc = get_annotations_ex(f)\n    cls = union2tuple(\n        eval_type(ann.pop(\"cls\") if cls_method else next(iter(ann.values())), glb, loc)\n    )\n    return patch_to(cls, as_prop=as_prop, cls_method=cls_method)(f)\n\n# %% ../../nbs/096_Meta.ipynb 35\ndef 
_delegates_without_docs(\n    to: Optional[F] = None,  # Delegatee\n    keep: bool = False,  # Keep `kwargs` in decorated function?\n    but: Optional[List[str]] = None,  # Exclude these parameters from signature\n) -> Callable[[F], F]:\n    \"Decorator: replace `**kwargs` in signature with params from `to`\"\n    if but is None:\n        but = []\n\n    def _f(f: F) -> F:\n        if to is None:\n            to_f, from_f = f.__base__.__init__, f.__init__  # type: ignore\n        else:\n            to_f, from_f = to.__init__ if isinstance(to, type) else to, f  # type: ignore\n        from_f = getattr(from_f, \"__func__\", from_f)\n        to_f = getattr(to_f, \"__func__\", to_f)\n        if hasattr(from_f, \"__delwrap__\"):\n            return f\n        sig = inspect.signature(from_f)\n        sigd = dict(sig.parameters)\n        if \"kwargs\" in sigd:\n            k = sigd.pop(\"kwargs\")\n        else:\n            k = None\n        s2 = {\n            k: v.replace(kind=inspect.Parameter.KEYWORD_ONLY)\n            for k, v in inspect.signature(to_f).parameters.items()\n            if v.default != inspect.Parameter.empty and k not in sigd and k not in but  # type: ignore\n        }\n        anno = {\n            k: v\n            for k, v in getattr(to_f, \"__annotations__\", {}).items()\n            if k not in sigd and k not in but  # type: ignore\n        }\n        sigd.update(s2)\n        if keep and k is not None:\n            sigd[\"kwargs\"] = k\n        else:\n            from_f.__delwrap__ = to_f\n        from_f.__signature__ = sig.replace(parameters=list(sigd.values()))\n        if hasattr(from_f, \"__annotations__\"):\n            from_f.__annotations__.update(anno)\n        return f\n\n    return _f\n\n# %% ../../nbs/096_Meta.ipynb 45\ndef _format_args(xs: List[docstring_parser.DocstringParam]) -> str:\n    return \"\\nArgs:\\n - \" + \"\\n - \".join(\n        [f\"{x.arg_name} ({x.type_name}): {x.description}\" for x in xs]\n    )\n\n\ndef 
combine_params(\n    f: F, o: Union[Type, Callable[..., Any]], but: Optional[List[str]] = None\n) -> F:\n    \"\"\"Combines docstring arguments of a function and another object or function\n\n    Args:\n        f: destination function where combined arguments will end up\n        o: source function from which arguments are taken\n\n    Returns:\n        Function f with augmented docstring including arguments from both functions/objects\n    \"\"\"\n    if but is None:\n        but = []\n\n    src_params = docstring_parser.parse_from_object(o).params\n    #     logger.info(f\"combine_params(): source:{_format_args(src_params)}\")\n    docs = docstring_parser.parse_from_object(f)\n    #     logger.info(f\"combine_params(): destination:{_format_args(docs.params)}\")\n    dst_params_names = [p.arg_name for p in docs.params]\n\n    combined_params = docs.params + [\n        x\n        for x in src_params\n        if x.arg_name not in dst_params_names and x.arg_name not in but\n    ]\n    #     logger.info(f\"combine_params(): combined:{_format_args(combined_params)}\")\n\n    docs.meta = [\n        x for x in docs.meta if not isinstance(x, docstring_parser.DocstringParam)\n    ] + combined_params  # type: ignore\n\n    f.__doc__ = docstring_parser.compose(\n        docs, style=docstring_parser.DocstringStyle.GOOGLE\n    )\n    return f\n\n# %% ../../nbs/096_Meta.ipynb 48\ndef delegates(\n    o: Union[Type, Callable[..., Any]],\n    keep: bool = False,\n    but: Optional[List[str]] = None,\n) -> Callable[[F], F]:\n    \"\"\"Delegates keyword arguments from o to the function the decorator is applied to\n\n    Args:\n        o: object (class or function) with default kwargs\n        keep: Keep `kwargs` in decorated function?\n        but: argument names not to include\n    \"\"\"\n\n    def _inner(f: F, keep: bool = keep, but: Optional[List[str]] = but) -> F:\n        def _combine_params(\n            o: Union[Type, Callable[..., Any]], but: Optional[List[str]] = 
None\n        ) -> Callable[[F], F]:\n            def __combine_params(\n                f: F,\n                o: Union[Type, Callable[..., Any]] = o,\n                but: Optional[List[str]] = but,\n            ) -> F:\n                return combine_params(f=f, o=o, but=but)\n\n            return __combine_params\n\n        @_combine_params(o, but=but)  # type: ignore\n        @_delegates_without_docs(o, keep=keep, but=but)  # type: ignore\n        @wraps(f)\n        def _f(*args: Any, **kwargs: Any) -> Any:\n            return f(*args, **kwargs)\n\n        return _f\n\n    return _inner\n\n# %% ../../nbs/096_Meta.ipynb 66\ndef use_parameters_of(\n    o: Union[Type, Callable[..., Any]], **kwargs: Dict[str, Any]\n) -> Dict[str, Any]:\n    \"\"\"Restrict parameters passed as keyword arguments to parameters from the signature of ``o``\n\n    Args:\n        o: object or callable whose signature is used for restricting keyword arguments\n        kwargs: keyword arguments\n\n    Returns:\n        restricted keyword arguments\n\n    \"\"\"\n    allowed_keys = set(inspect.signature(o).parameters.keys())\n    return {k: v for k, v in kwargs.items() if k in allowed_keys}\n\n# %% ../../nbs/096_Meta.ipynb 68\ndef filter_using_signature(f: Callable, **kwargs: Dict[str, Any]) -> Dict[str, Any]:\n    \"\"\"Restrict keyword arguments to parameter names from the signature of ``f``\"\"\"\n    param_names = list(inspect.signature(f).parameters.keys())\n    return {k: v for k, v in kwargs.items() if k in param_names}\n\n# %% ../../nbs/096_Meta.ipynb 70\nTorF = TypeVar(\"TorF\", Type, Callable[..., Any])\n\n\ndef export(module_name: str) -> Callable[[TorF], TorF]:\n    \"\"\"\n    Decorator that sets the __module__ attribute of the decorated object to the specified module name.\n\n    Args:\n        module_name: Name of the module to set as __module__ attribute.\n\n    Returns:\n        Decorator function that sets the __module__ attribute of the decorated object.\n    \"\"\"\n\n    def _inner(o: TorF, module_name: str = module_name) -> 
TorF:\n        o.__module__ = module_name\n        return o\n\n    return _inner\n\n# %% ../../nbs/096_Meta.ipynb 73\nT = TypeVar(\"T\")\n\n\ndef classcontextmanager(name: str = \"lifecycle\") -> Callable[[Type[T]], Type[T]]:\n    \"\"\"\n    Decorator that adds context manager functionality to a class.\n\n    Args:\n        name: Name of the context manager attribute in the class. Default is \"lifecycle\".\n\n    Returns:\n        Decorator function that adds context manager functionality to the class.\n    \"\"\"\n\n    def _classcontextmanager(cls: Type[T], name: str = name) -> Type[T]:\n        if not hasattr(cls, name):\n            raise ValueError\n\n        @patch\n        def __enter__(self: cls) -> Any:  # type: ignore\n            if not hasattr(self, \"_lifecycle_ctx\"):\n                self._lifecycle_ctx = []  # type: ignore\n\n            self._lifecycle_ctx.append(getattr(self, name)())  # type: ignore\n            return self._lifecycle_ctx[-1].__enter__()  # type: ignore\n\n        @patch\n        def __exit__(self: cls, *args: Any) -> None:  # type: ignore\n            self._lifecycle_ctx.pop(-1).__exit__(*args)  # type: ignore\n\n        return cls\n\n    return _classcontextmanager\n\n# %% ../../nbs/096_Meta.ipynb 76\ndef _get_default_kwargs_from_sig(f: F, **kwargs: Any) -> Dict[str, Any]:\n    \"\"\"\n    Get default values for function **kwargs\n\n    Args:\n        f: Function to extract default values from\n\n    Returns:\n        Dict of default values of function f **kwargs\n    \"\"\"\n    defaults = {\n        k: v.default\n        for k, v in inspect.signature(f).parameters.items()\n        if v.default != inspect._empty\n    }\n    defaults.update(kwargs)\n    return defaults\n"
  },
  {
    "path": "fastkafka/_components/producer_decorator.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/013_ProducerDecorator.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'BaseSubmodel', 'ProduceReturnTypes', 'ProduceCallable', 'KafkaEvent', 'unwrap_from_kafka_event',\n           'release_callback', 'produce_single', 'send_batch', 'produce_batch', 'producer_decorator']\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 1\nimport asyncio\nimport functools\nimport logging\nimport random\nimport time\nfrom asyncio import iscoroutinefunction  # do not use the version from inspect\nfrom dataclasses import dataclass\nfrom functools import partial\nfrom inspect import Parameter\nfrom typing import *\n\nfrom aiokafka import AIOKafkaProducer\nfrom aiokafka.errors import KafkaTimeoutError, RequestTimedOutError\nfrom aiokafka.producer.message_accumulator import BatchBuilder\nfrom pydantic import BaseModel\n\nfrom .logger import get_logger, cached_log\nfrom .meta import export\nfrom .helpers import remove_suffix\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 3\nlogger = get_logger(__name__)\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 5\nBaseSubmodel = TypeVar(\"BaseSubmodel\", bound=Union[List[BaseModel], BaseModel])\nBaseSubmodel\n\n\n@dataclass\n@export(\"fastkafka\")\nclass KafkaEvent(Generic[BaseSubmodel]):\n    \"\"\"\n    A generic class for representing Kafka events. 
Based on BaseSubmodel, bound to pydantic.BaseModel\n\n    Attributes:\n        message (BaseSubmodel): The message contained in the Kafka event, can be of type pydantic.BaseModel.\n        key (bytes, optional): The optional key used to identify the Kafka event.\n    \"\"\"\n\n    message: BaseSubmodel\n    key: Optional[bytes] = None\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 7\ndef unwrap_from_kafka_event(var_type: Union[Type, Parameter]) -> Union[Type, Parameter]:\n    \"\"\"\n    Unwraps the type from a KafkaEvent.\n\n    Args:\n        var_type: Type to unwrap.\n\n    Returns:\n        Type: Unwrapped type if the given type is a KafkaEvent, otherwise returns the same type.\n\n    Example:\n        - Input: KafkaEvent[str]\n          Output: str\n        - Input: int\n          Output: int\n    \"\"\"\n    if hasattr(var_type, \"__origin__\") and var_type.__origin__ == KafkaEvent:\n        return var_type.__args__[0]  # type: ignore\n    else:\n        return var_type\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 9\nProduceReturnTypes = Union[\n    BaseModel, KafkaEvent[BaseModel], List[BaseModel], KafkaEvent[List[BaseModel]]\n]\n\nProduceCallable = Union[\n    Callable[..., ProduceReturnTypes], Callable[..., Awaitable[ProduceReturnTypes]]\n]\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 12\ndef _wrap_in_event(\n    message: Union[BaseModel, List[BaseModel], KafkaEvent]\n) -> KafkaEvent:\n    return message if type(message) == KafkaEvent else KafkaEvent(message)\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 15\ndef release_callback(\n    fut: asyncio.Future, topic: str, wrapped_val: KafkaEvent[BaseModel]\n) -> None:\n    if fut.exception() is not None:\n        cached_log(\n            logger,\n            f\"release_callback(): Exception {fut.exception()=}, raised when producing {wrapped_val.message=} to {topic=}\",\n            level=logging.WARNING,\n            timeout=1,\n            log_id=\"release_callback()\",\n        )\n    pass\n\n# %% 
../../nbs/013_ProducerDecorator.ipynb 16\nasync def produce_single(  # type: ignore\n    producer: AIOKafkaProducer,\n    topic: str,\n    encoder_fn: Callable[[BaseModel], bytes],\n    wrapped_val: KafkaEvent[BaseModel],\n) -> None:\n    \"\"\"\n    Sends a single message to the Kafka producer.\n\n    Args:\n        producer (AIOKafkaProducer): The Kafka producer object.\n        topic (str): The topic to which the message will be sent.\n        encoder_fn (Callable[[BaseModel], bytes]): The encoding function to encode the message.\n        wrapped_val (KafkaEvent[BaseModel]): The wrapped Kafka event containing the message.\n    \"\"\"\n    while True:\n        try:\n            fut = await producer.send(\n                topic, encoder_fn(wrapped_val.message), key=wrapped_val.key\n            )\n            fut.add_done_callback(\n                partial(release_callback, topic=topic, wrapped_val=wrapped_val)\n            )\n            break\n        except KafkaTimeoutError as e:\n            logger.warning(\n                f\"produce_single(): Exception {e=} raised when producing {wrapped_val.message} to {topic=}, sleeping for 1 second and retrying..\"\n            )\n            await asyncio.sleep(1)\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 21\nasync def send_batch(  # type: ignore\n    producer: AIOKafkaProducer, topic: str, batch: BatchBuilder, key: Optional[bytes]\n) -> None:\n    \"\"\"\n    Sends a batch of messages to the Kafka producer.\n\n    Args:\n        producer (AIOKafkaProducer): The Kafka producer object.\n        topic (str): The topic to which the messages will be sent.\n        batch (BatchBuilder): The batch builder object containing the messages.\n        key (Optional[bytes]): The optional key used to identify the batch of messages.\n\n    Returns:\n        None\n    \"\"\"\n    partitions = await producer.partitions_for(topic)\n    if key == None:\n        partition = random.choice(tuple(partitions))  # nosec\n    else:\n        
partition = producer._partition(topic, None, None, None, key, None)\n    while True:\n        try:\n            await producer.send_batch(batch, topic, partition=partition)\n            break\n        except KafkaTimeoutError as e:\n            logger.warning(\n                f\"send_batch(): Exception {e} raised when producing {batch} to {topic=}, sleeping for 1 second and retrying..\"\n            )\n            await asyncio.sleep(1)\n\n\nasync def produce_batch(  # type: ignore\n    producer: AIOKafkaProducer,\n    topic: str,\n    encoder_fn: Callable[[BaseModel], bytes],\n    wrapped_val: KafkaEvent[List[BaseModel]],\n) -> ProduceReturnTypes:\n    \"\"\"\n    Sends a batch of messages to the Kafka producer.\n\n    Args:\n        producer (AIOKafkaProducer): The Kafka producer object.\n        topic (str): The topic to which the messages will be sent.\n        encoder_fn (Callable[[BaseModel], bytes]): The encoding function to encode the messages.\n        wrapped_val (KafkaEvent[List[BaseModel]]): The wrapped Kafka event containing the list of messages.\n\n    Returns:\n        ProduceReturnTypes: The return value from the decorated function.\n    \"\"\"\n    batch = producer.create_batch()\n\n    for message in wrapped_val.message:\n        metadata = batch.append(\n            key=wrapped_val.key,\n            value=encoder_fn(message),\n            timestamp=int(time.time() * 1000),\n        )\n        if metadata == None:\n            # send batch\n            await send_batch(producer, topic, batch, wrapped_val.key)\n            # create new batch\n            batch = producer.create_batch()\n            batch.append(\n                key=None, value=encoder_fn(message), timestamp=int(time.time() * 1000)\n            )\n\n    await send_batch(producer, topic, batch, wrapped_val.key)\n\n# %% ../../nbs/013_ProducerDecorator.ipynb 24\ndef producer_decorator(\n    producer_store: Dict[str, Any],\n    func: ProduceCallable,\n    topic_key: str,\n    
encoder_fn: Callable[[BaseModel], bytes],\n) -> ProduceCallable:\n    \"\"\"\n    Decorator for Kafka producer functions.\n\n    Args:\n        producer_store (Dict[str, Any]): Dictionary to store the Kafka producer objects.\n        func (ProduceCallable): The function to be decorated.\n        topic_key (str): The key used to identify the topic.\n        encoder_fn (Callable[[BaseModel], bytes]): The encoding function to encode the messages.\n\n    Returns:\n        ProduceCallable: The decorated function.\n\n    Raises:\n        ValueError: If the decorated function is synchronous.\n    \"\"\"\n\n    @functools.wraps(func)\n    async def _produce_async(\n        *args: List[Any],\n        topic_key: str = topic_key,\n        encoder_fn: Callable[[BaseModel], bytes] = encoder_fn,\n        producer_store: Dict[str, Any] = producer_store,\n        f: Callable[..., Awaitable[ProduceReturnTypes]] = func,  # type: ignore\n        **kwargs: Any,\n    ) -> ProduceReturnTypes:\n        return_val = await f(*args, **kwargs)\n        wrapped_val = _wrap_in_event(return_val)\n        _, producer, _, _ = producer_store[topic_key]\n        topic = remove_suffix(topic_key)\n\n        if isinstance(wrapped_val.message, list):\n            await produce_batch(producer, topic, encoder_fn, wrapped_val)\n        else:\n            await produce_single(producer, topic, encoder_fn, wrapped_val)\n        return return_val\n\n    if not iscoroutinefunction(func):\n        raise ValueError(\n            \"Synchronous functions are not supported for produce operation\"\n        )\n\n    return _produce_async\n"
  },
  {
    "path": "fastkafka/_components/task_streaming.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/006_TaskStreaming.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'TaskPool', 'ExceptionMonitor', 'StreamExecutor', 'DynamicTaskExecutor', 'SequentialExecutor',\n           'get_executor']\n\n# %% ../../nbs/006_TaskStreaming.ipynb 1\nimport asyncio\nimport sys\nfrom abc import ABC, abstractmethod\n\nfrom asyncio import Task\nfrom typing import *\n\nimport anyio\nfrom aiokafka import ConsumerRecord\n\nfrom logging import Logger\nfrom .logger import get_logger\nfrom .meta import export\n\n# %% ../../nbs/006_TaskStreaming.ipynb 3\nlogger = get_logger(__name__)\n\n# %% ../../nbs/006_TaskStreaming.ipynb 10\nclass TaskPool:\n    def __init__(\n        self,\n        size: int = 100_000,\n        on_error: Optional[Callable[[BaseException], None]] = None,\n    ):\n        \"\"\"\n        Initializes a TaskPool instance.\n\n        Args:\n            size: The size of the task pool. Defaults to 100,000.\n            on_error: Optional callback function to handle task errors. 
Defaults to None.\n\n        Returns:\n            None\n        \"\"\"\n        self.size = size\n        self.pool: Set[Task] = set()\n        self.on_error = on_error\n        self.finished = False\n\n    async def add(self, item: Task) -> None:\n        \"\"\"\n        Adds a task to the task pool.\n\n        Args:\n            item: The task to be added.\n\n        Returns:\n            None\n        \"\"\"\n        while len(self.pool) >= self.size:\n            await asyncio.sleep(0)\n        self.pool.add(item)\n        item.add_done_callback(self.discard)\n\n    def discard(self, task: Task) -> None:\n        \"\"\"\n        Discards a completed task from the task pool.\n\n        Args:\n            task: The completed task to be discarded.\n\n        Returns:\n            None\n        \"\"\"\n        e = task.exception()\n        if e is not None and self.on_error is not None:\n            try:\n                self.on_error(e)\n            except Exception as ee:\n                logger.warning(\n                    f\"Exception {ee} raised when calling on_error() callback: {e}\"\n                )\n\n        self.pool.discard(task)\n\n    def __len__(self) -> int:\n        \"\"\"\n        Returns the number of tasks in the task pool.\n\n        Returns:\n            The number of tasks in the task pool.\n        \"\"\"\n        return len(self.pool)\n\n    async def __aenter__(self) -> \"TaskPool\":\n        self.finished = False\n        return self\n\n    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\n        while len(self) > 0:\n            await asyncio.sleep(0)\n        self.finished = True\n\n    @staticmethod\n    def log_error(logger: Logger) -> Callable[[Exception], None]:\n        \"\"\"\n        Creates a decorator that logs errors using the specified logger.\n\n        Args:\n            logger: The logger to use for error logging.\n\n        Returns:\n            The decorator function.\n        \"\"\"\n\n        def 
_log_error(e: Exception, logger: Logger = logger) -> None:\n            logger.warning(f\"{e=}\")\n\n        return _log_error\n\n# %% ../../nbs/006_TaskStreaming.ipynb 14\nclass ExceptionMonitor:\n    def __init__(self) -> None:\n        \"\"\"\n        Initializes an ExceptionMonitor instance.\n\n        Returns:\n            None\n        \"\"\"\n        self.exceptions: List[Exception] = []\n        self.exception_found = False\n\n    def on_error(self, e: Exception) -> None:\n        \"\"\"\n        Handles an error by storing the exception.\n\n        Args:\n            e: The exception to be handled.\n\n        Returns:\n            None\n        \"\"\"\n        self.exceptions.append(e)\n        self.exception_found = True\n\n    def _monitor_step(self) -> None:\n        \"\"\"\n        Raises the next exception in the queue.\n\n        Returns:\n            None\n        \"\"\"\n        if len(self.exceptions) > 0:\n            e = self.exceptions.pop(0)\n            raise e\n\n    async def __aenter__(self) -> \"ExceptionMonitor\":\n        return self\n\n    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\n        while len(self.exceptions) > 0:\n            self._monitor_step()\n            await asyncio.sleep(0)\n\n# %% ../../nbs/006_TaskStreaming.ipynb 17\nclass StreamExecutor(ABC):\n    @abstractmethod\n    async def run(  # type: ignore\n        self,\n        *,\n        is_shutting_down_f: Callable[[], bool],\n        generator: Callable[[], Awaitable[ConsumerRecord]],\n        processor: Callable[[ConsumerRecord], Awaitable[None]],\n    ) -> None:\n        \"\"\"\n        Abstract method for running the stream executor.\n\n        Args:\n            is_shutting_down_f: Function to check if the executor is shutting down.\n            generator: Generator function for retrieving consumer records.\n            processor: Processor function for processing consumer records.\n        \"\"\"\n        pass\n\n# %% 
../../nbs/006_TaskStreaming.ipynb 20\ndef _process_items_task(  # type: ignore\n    processor: Callable[[ConsumerRecord], Awaitable[None]], task_pool: TaskPool\n) -> Callable[\n    [\n        anyio.streams.memory.MemoryObjectReceiveStream,\n        Callable[[ConsumerRecord], Awaitable[None]],\n        bool,\n    ],\n    Coroutine[Any, Any, Awaitable[None]],\n]:\n    async def _process_items_wrapper(  # type: ignore\n        receive_stream: anyio.streams.memory.MemoryObjectReceiveStream,\n        processor: Callable[[ConsumerRecord], Awaitable[None]] = processor,\n        task_pool=task_pool,\n    ):\n        async with receive_stream:\n            async for msg in receive_stream:\n                task: asyncio.Task = asyncio.create_task(processor(msg))  # type: ignore\n                await task_pool.add(task)\n\n    return _process_items_wrapper\n\n# %% ../../nbs/006_TaskStreaming.ipynb 21\n@export(\"fastkafka.executors\")\nclass DynamicTaskExecutor(StreamExecutor):\n    \"\"\"A class that implements a dynamic task executor for processing consumer records.\n\n    The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\n    for running tasks in parallel using asyncio.Task.\n    \"\"\"\n\n    def __init__(\n        self,\n        throw_exceptions: bool = False,\n        max_buffer_size: int = 100_000,\n        size: int = 100_000,\n    ):\n        \"\"\"Create an instance of DynamicTaskExecutor\n\n        Args:\n            throw_exceptions: Flag indicating whether exceptions should be thrown or logged.\n                Defaults to False.\n            max_buffer_size: Maximum buffer size for the memory object stream.\n                Defaults to 100_000.\n            size: Size of the task pool. 
Defaults to 100_000.\n        \"\"\"\n        self.throw_exceptions = throw_exceptions\n        self.max_buffer_size = max_buffer_size\n        self.exception_monitor = ExceptionMonitor()\n        self.task_pool = TaskPool(\n            on_error=self.exception_monitor.on_error  # type: ignore\n            if throw_exceptions\n            else TaskPool.log_error(logger),\n            size=size,\n        )\n\n    async def run(  # type: ignore\n        self,\n        *,\n        is_shutting_down_f: Callable[[], bool],\n        generator: Callable[[], Awaitable[ConsumerRecord]],\n        processor: Callable[[ConsumerRecord], Awaitable[None]],\n    ) -> None:\n        \"\"\"\n        Runs the dynamic task executor.\n\n        Args:\n            is_shutting_down_f: Function to check if the executor is shutting down.\n            generator: Generator function for retrieving consumer records.\n            processor: Processor function for processing consumer records.\n        \"\"\"\n        send_stream, receive_stream = anyio.create_memory_object_stream(\n            max_buffer_size=self.max_buffer_size\n        )\n\n        async with self.exception_monitor, self.task_pool:\n            async with anyio.create_task_group() as tg:\n                tg.start_soon(\n                    _process_items_task(processor, self.task_pool), receive_stream\n                )\n                async with send_stream:\n                    while not is_shutting_down_f():\n                        if (\n                            self.exception_monitor.exception_found\n                            and self.throw_exceptions\n                        ):\n                            break\n                        msgs = await generator()\n                        for msg in msgs:\n                            await send_stream.send(msg)\n\n# %% ../../nbs/006_TaskStreaming.ipynb 30\ndef _process_items_coro(  # type: ignore\n    processor: Callable[[ConsumerRecord], Awaitable[None]],\n    
throw_exceptions: bool,\n) -> Callable[\n    [\n        anyio.streams.memory.MemoryObjectReceiveStream,\n        Callable[[ConsumerRecord], Awaitable[None]],\n        bool,\n    ],\n    Coroutine[Any, Any, Awaitable[None]],\n]:\n    async def _process_items_wrapper(  # type: ignore\n        receive_stream: anyio.streams.memory.MemoryObjectReceiveStream,\n        processor: Callable[[ConsumerRecord], Awaitable[None]] = processor,\n        throw_exceptions: bool = throw_exceptions,\n    ) -> Awaitable[None]:\n        async with receive_stream:\n            async for msg in receive_stream:\n                try:\n                    await processor(msg)\n                except Exception as e:\n                    if throw_exceptions:\n                        raise e\n                    else:\n                        logger.warning(f\"{e=}\")\n\n    return _process_items_wrapper\n\n# %% ../../nbs/006_TaskStreaming.ipynb 31\n@export(\"fastkafka.executors\")\nclass SequentialExecutor(StreamExecutor):\n    \"\"\"A class that implements a sequential executor for processing consumer records.\n\n    The SequentialExecutor class extends the StreamExecutor class and provides functionality\n    for running processing tasks in sequence by awaiting their coroutines.\n    \"\"\"\n\n    def __init__(\n        self,\n        throw_exceptions: bool = False,\n        max_buffer_size: int = 100_000,\n    ):\n        \"\"\"Create an instance of SequentialExecutor\n\n        Args:\n            throw_exceptions: Flag indicating whether exceptions should be thrown or logged.\n                Defaults to False.\n            max_buffer_size: Maximum buffer size for the memory object stream.\n                Defaults to 100_000.\n        \"\"\"\n        self.throw_exceptions = throw_exceptions\n        self.max_buffer_size = max_buffer_size\n\n    async def run(  # type: ignore\n        self,\n        *,\n        is_shutting_down_f: Callable[[], bool],\n        generator: Callable[[], 
Awaitable[ConsumerRecord]],\n        processor: Callable[[ConsumerRecord], Awaitable[None]],\n    ) -> None:\n        \"\"\"\n        Runs the sequential executor.\n\n        Args:\n            is_shutting_down_f: Function to check if the executor is shutting down.\n            generator: Generator function for retrieving consumer records.\n            processor: Processor function for processing consumer records.\n        \"\"\"\n\n        send_stream, receive_stream = anyio.create_memory_object_stream(\n            max_buffer_size=self.max_buffer_size\n        )\n\n        async with anyio.create_task_group() as tg:\n            tg.start_soon(\n                _process_items_coro(processor, self.throw_exceptions), receive_stream\n            )\n            async with send_stream:\n                while not is_shutting_down_f():\n                    msgs = await generator()\n                    for msg in msgs:\n                        await send_stream.send(msg)\n\n# %% ../../nbs/006_TaskStreaming.ipynb 34\ndef get_executor(executor: Union[str, StreamExecutor, None] = None) -> StreamExecutor:\n    \"\"\"\n    Returns an instance of the specified executor.\n\n    Args:\n        executor: Executor instance or name of the executor.\n\n    Returns:\n        Instance of the specified executor.\n\n    Raises:\n        AttributeError: If the executor is not found.\n    \"\"\"\n    if isinstance(executor, StreamExecutor):\n        return executor\n    elif executor is None:\n        executor = \"SequentialExecutor\"\n    return getattr(sys.modules[\"fastkafka._components.task_streaming\"], executor)()  # type: ignore\n"
  },
  {
    "path": "fastkafka/_components/test_dependencies.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/098_Test_Dependencies.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'kafka_repo_url', 'local_path', 'check_java', 'VersionParser', 'get_kafka_version', 'check_kafka',\n           'generate_app_src', 'generate_app_in_tmp']\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 2\nimport re\nimport platform\nimport shutil\nimport tarfile\nfrom contextlib import contextmanager\nfrom html.parser import HTMLParser\nfrom os import environ, rename\nfrom os.path import expanduser\nfrom pathlib import Path\nfrom tempfile import TemporaryDirectory\nfrom typing import *\n\nfrom packaging import version\n\nfrom .helpers import change_dir, in_notebook\nfrom .logger import get_logger\n\nif in_notebook():\n    from tqdm.notebook import tqdm\nelse:\n    from tqdm import tqdm\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 6\ndef check_java(*, potential_jdk_path: Optional[List[Path]] = None) -> bool:\n    \"\"\"Checks if JDK 11 is installed on the machine and exports it to PATH if necessary.\n\n    Args:\n        potential_jdk_path: Optional. 
List of potential paths where JDK 11 may be installed.\n                            If not provided, it defaults to searching for JDK 11 in the user's home directory.\n\n    Returns:\n        bool: True if JDK 11 is installed and exported to PATH, False otherwise.\n    \"\"\"\n    if potential_jdk_path is None:\n        potential_jdk_path = list(Path(expanduser(\"~\") + \"/.jdk\").glob(\"jdk-11*\"))\n\n    if potential_jdk_path != []:\n        logger.info(\"Java is already installed.\")\n        if not shutil.which(\"java\"):\n            logger.info(\"But not exported to PATH, exporting...\")\n            env_path_separator = \";\" if platform.system() == \"Windows\" else \":\"\n            environ[\"PATH\"] = (\n                environ[\"PATH\"] + f\"{env_path_separator}{potential_jdk_path[0]/ 'bin'}\"\n            )\n        return True\n    return False\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 8\ndef _install_java() -> None:\n    \"\"\"Checks if jdk-11 is installed on the machine and installs it if not\n\n    Returns:\n       None\n\n    Raises:\n        RuntimeError: If JDK 11 installation fails.\n    \"\"\"\n    try:\n        import jdk\n    except Exception as e:\n        msg = \"Please install test version of fastkafka using 'pip install fastkafka[test]' command\"\n        logger.error(msg)\n        raise RuntimeError(msg)\n\n    if not check_java():\n        logger.info(\"Installing Java...\")\n        logger.info(\" - installing jdk...\")\n        jdk_bin_path = Path(jdk.install(\"11\"))\n        logger.info(f\" - jdk path: {jdk_bin_path}\")\n        env_path_separator = \";\" if platform.system() == \"Windows\" else \":\"\n        environ[\"PATH\"] = (\n            environ[\"PATH\"] + f\"{env_path_separator}{jdk_bin_path / 'bin'}\"\n        )\n        logger.info(\"Java installed.\")\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 10\nclass VersionParser(HTMLParser):\n    \"\"\"\n    A parser class for extracting the newest version of a software from 
HTML data.\n\n    This class extends the HTMLParser class and provides a mechanism to extract the newest version of a software\n    from HTML data using regular expressions.\n\n    Attributes:\n        newest_version (str): The newest version of the software.\n\n    \"\"\"\n\n    def __init__(self) -> None:\n        \"\"\"\n        Initializes a VersionParser object.\n\n        The newest_version attribute is initialized to \"0.0.0\".\n\n        \"\"\"\n        HTMLParser.__init__(self)\n        self.newest_version = \"0.0.0\"\n\n    def handle_data(self, data: str) -> None:\n        \"\"\"\n        Handles the data encountered in the HTML parsing process.\n\n        This method is called by the HTMLParser base class when data is encountered within HTML tags.\n        It uses regular expressions to search for version numbers in the data and updates the newest_version\n        attribute if a higher version is found.\n\n        Args:\n            data (str): The data encountered during parsing.\n\n        \"\"\"\n        match = re.search(\"[0-9]+\\.[0-9]+\\.[0-9]+\", data)\n        if match is not None:\n            if version.parse(self.newest_version) < version.parse(match.group(0)):\n                self.newest_version = match.group(0)\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 12\nkafka_repo_url = \"https://dlcdn.apache.org/kafka\"\n\n\ndef get_kafka_version(kafka_repo_url: str = kafka_repo_url) -> str:\n    \"\"\"\n    Retrieves the newest version of Kafka from the given Kafka repository URL.\n\n    Args:\n        kafka_repo_url: The URL of the Kafka repository. 
Defaults to `https://dlcdn.apache.org/kafka`.\n\n    Returns:\n        The newest version of Kafka as a string.\n\n    Raises:\n        RuntimeError: If the requests module is not installed or encounters an error during the request.\n\n    \"\"\"\n    try:\n        import requests\n    except Exception as e:\n        msg = \"Please install test version of fastkafka using 'pip install fastkafka[test]' command\"\n        logger.error(msg)\n        raise RuntimeError(msg)\n\n    parser = VersionParser()\n\n    response = requests.get(\n        kafka_repo_url,\n        timeout=60,\n    )\n    parser.feed(response.text)\n\n    return parser.newest_version\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 13\nlocal_path = (\n    Path(expanduser(\"~\")).parent / \"Public\"\n    if platform.system() == \"Windows\"\n    else Path(expanduser(\"~\")) / \".local\"\n)\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 14\ndef check_kafka(local_path: Path = local_path) -> bool:\n    \"\"\"Checks if Kafka is installed on the machine and exports it to PATH if necessary.\n\n    Args:\n        kafka_path: Path to the Kafka installation directory. 
Defaults to the global variable `kafka_path`.\n\n    Returns:\n        bool: True if Kafka is installed and exported to PATH, False otherwise.\n    \"\"\"\n\n    kafka_fname = f\"kafka_2.13-{get_kafka_version()}\"\n\n    kafka_path = (\n        local_path / \"kafka\"\n        if platform.system() == \"Windows\"\n        else local_path / f\"{kafka_fname}\"\n    )\n\n    if (kafka_path / \"bin\").exists():\n        logger.info(\"Kafka is installed.\")\n        if not shutil.which(\"kafka-server-start.sh\"):\n            logger.info(\"But not exported to PATH, exporting...\")\n            kafka_binary_path = (\n                f\";{kafka_path / 'bin' / 'windows'}\"\n                if platform.system() == \"Windows\"\n                else f\":{kafka_path / 'bin'}\"\n            )\n            environ[\"PATH\"] = environ[\"PATH\"] + kafka_binary_path\n        return True\n    return False\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 15\ndef _install_kafka(\n    local_path: Path = local_path, kafka_repo_url: str = kafka_repo_url\n) -> None:\n    \"\"\"Checks if Kafka is installed on the machine and installs it if not.\n\n    Args:\n        local_path: Path where the Kafka installation package will be stored. Defaults to the global variable `local_path`.\n        kafka_repo_url: The URL of the Kafka repository. 
Defaults to `https://dlcdn.apache.org/kafka`.\n    Returns:\n       None\n\n    Raises:\n        RuntimeError: If Kafka installation fails.\n    \"\"\"\n    try:\n        import requests\n    except Exception as e:\n        msg = \"Please install test version of fastkafka using 'pip install fastkafka[test]' command\"\n        logger.error(msg)\n        raise RuntimeError(msg)\n\n    kafka_version = get_kafka_version()\n    kafka_fname = f\"kafka_2.13-{kafka_version}\"\n    kafka_url = f\"{kafka_repo_url}/{kafka_version}/{kafka_fname}.tgz\"\n    tgz_path = local_path / f\"{kafka_fname}.tgz\"\n    kafka_path = (\n        local_path / \"kafka\"\n        if platform.system() == \"Windows\"\n        else local_path / f\"{kafka_fname}\"\n    )\n\n    if not check_kafka():\n        logger.info(\"Installing Kafka...\")\n        local_path.mkdir(exist_ok=True, parents=True)\n        response = requests.get(\n            kafka_url,\n            stream=True,\n            timeout=60,\n        )\n        try:\n            total = response.raw.length_remaining // 128\n        except Exception:\n            total = None\n\n        with open(tgz_path, \"wb\") as f:\n            for data in tqdm(response.iter_content(chunk_size=128), total=total):\n                f.write(data)\n\n        with tarfile.open(tgz_path) as tar:\n            for tarinfo in tar:\n                tar.extract(tarinfo, local_path)\n\n        if platform.system() == \"Windows\":\n            rename(local_path / f\"{kafka_fname}\", kafka_path)\n\n        kafka_binary_path = (\n            f\";{kafka_path / 'bin' / 'windows'}\"\n            if platform.system() == \"Windows\"\n            else f\":{kafka_path / 'bin'}\"\n        )\n        environ[\"PATH\"] = environ[\"PATH\"] + kafka_binary_path\n        logger.info(f\"Kafka installed in {kafka_path}.\")\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 17\ndef _install_testing_deps() -> None:\n    \"\"\"Installs Java and Kafka dependencies required for 
testing.\n\n    Raises:\n        RuntimeError: If Java or Kafka installation fails.\n    \"\"\"\n    _install_java()\n    _install_kafka()\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 19\ndef generate_app_src(out_path: Union[Path, str]) -> None:\n    \"\"\"Generates the source code for the test application based on a Jupyter notebook.\n\n    Args:\n        out_path: Path where the generated source code will be saved.\n\n    Raises:\n        ValueError: If the Jupyter notebook file does not exist.\n    \"\"\"\n    import nbformat\n    from nbconvert import PythonExporter\n\n    path = Path(\"099_Test_Service.ipynb\")\n    if not path.exists():\n        path = Path(\"..\") / \"099_Test_Service.ipynb\"\n    if not path.exists():\n        raise ValueError(f\"Path '{path.resolve()}' does not exists.\")\n\n    with open(path, \"r\") as f:\n        notebook = nbformat.reads(f.read(), nbformat.NO_CONVERT)\n        exporter = PythonExporter()\n        source, _ = exporter.from_notebook_node(notebook)\n\n    with open(out_path, \"w\") as f:\n        f.write(source)\n\n# %% ../../nbs/098_Test_Dependencies.ipynb 21\n@contextmanager\ndef generate_app_in_tmp() -> Generator[str, None, None]:\n    \"\"\"Context manager that generates the test application source code in a temporary directory.\n\n    Yields:\n        str: Import statement for the generated test application.\n    \"\"\"\n    with TemporaryDirectory() as d:\n        src_path = Path(d) / \"main.py\"\n        generate_app_src(src_path)\n        with change_dir(d):\n            import_str = f\"{src_path.stem}:kafka_app\"\n            yield import_str\n"
  },
  {
    "path": "fastkafka/_docusaurus_helper.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/096_Docusaurus_Helper.ipynb.\n\n# %% auto 0\n__all__ = ['CustomNbdevLookup', 'fix_invalid_syntax_in_markdown', 'generate_markdown_docs', 'generate_sidebar',\n           'delete_unused_markdown_files_from_sidebar', 'update_readme']\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 2\nimport itertools\nimport re\nimport ast\nimport types\nfrom inspect import (\n    Signature,\n    getmembers,\n    isclass,\n    isfunction,\n    signature,\n    ismethod,\n    getsource,\n    Parameter,\n)\nfrom pathlib import Path\nfrom typing import *\nfrom urllib.parse import urljoin\nfrom functools import lru_cache\n\nimport typer\nfrom docstring_parser import parse\nfrom docstring_parser.common import (\n    DocstringParam,\n    DocstringRaises,\n    DocstringReturns,\n    Docstring,\n)\nfrom nbdev.config import get_config\nfrom nbdev.quarto import nbdev_readme\nfrom nbdev.doclinks import NbdevLookup, patch_name, L, _find_mod\nfrom nbdev_mkdocs.mkdocs import (\n    _add_all_submodules,\n    _import_all_members,\n    _import_functions_and_classes,\n    _import_submodules,\n)\nfrom nbdev_mkdocs._helpers.doc_links_utils import (\n    fix_sym_links as update_default_symbol_links,\n)\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 4\ndef _get_return_annotation(s: Signature) -> str:\n    \"\"\"Get the return annotation from the function signature.\n\n    Args:\n        s: The signature of the function from which the annotations must be extracted.\n\n    Returns:\n        The return annotation, or an empty string if not available.\n\n    \"\"\"\n    if s.return_annotation == None or \"inspect._empty\" in str(s.return_annotation):\n        return \"\"\n    if isinstance(s.return_annotation, str):\n        return s.return_annotation\n    ret_val: str = (\n        str(s.return_annotation).replace(\"typing.\", \"\").replace(\"NoneType\", \"None\")\n        if \"typing.\" in str(s.return_annotation)\n        else 
str(s.return_annotation.__name__)\n    )\n    return ret_val\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 19\ndef _get_param_annotation(param: Parameter) -> str:\n    \"\"\"Get the annotation of a function parameter.\n\n    Args:\n        param: The parameter object.\n\n    Returns:\n        The parameter annotation, or an empty string if not available.\n\n    \"\"\"\n\n    if \"typing.\" in str(param.annotation):\n        return f'`{str(param.annotation).replace(\"typing.\", \"\")}`'\n    elif isinstance(param.annotation, str):\n        return param.annotation\n    else:\n        return (\n            \"\"\n            if param.annotation.__name__ == \"_empty\"\n            else f\"`{param.annotation.__name__}`\"\n        )\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 21\ndef _get_default_value(param: Parameter) -> str:\n    \"\"\"Get the default value of the function parameter.\n\n    Args:\n        param: The parameter object.\n\n    Returns:\n        The default value of the function parameter.\n\n    \"\"\"\n    if param.default is param.empty:\n        return \"*required*\"\n\n    return (\n        f\"`'{param.default}'`\"\n        if isinstance(param.default, str)\n        else f\"`{param.default}`\"\n    )\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 23\ndef _get_params_annotation(s: Signature) -> Dict[str, Dict[str, str]]:\n    \"\"\"Get the annotations along with its default values for the parameters of the symbol.\n\n    Args:\n        s: The signature of the function from which the annotations must be extracted.\n\n    Returns:\n        The parameter annotations along with its default value.\n    \"\"\"\n    return {\n        f\"{param.name}\": {\n            \"type\": _get_param_annotation(param),\n            \"default\": _get_default_value(param),\n        }\n        for param in s.parameters.values()\n    }\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 25\ndef _generate_parameters_table(\n    symbol_annotations: Dict[str, Union[Dict[str, str], str]],\n    
section_items: Union[List[DocstringParam]],\n    section_name: str,\n) -> str:\n    \"\"\"Generate parameter table in markdown format\n\n    Args:\n        symbol_annotations: Symbol annotations along with its default value\n        section_items: The parameter section of a parsed docstring\n        section_name: The name of the section\n\n    Returns:\n        The parameters of a symbol in markdown-formatted string\n    \"\"\"\n    nl = \"\\n\"\n    _section_template = (\n        \"|  Name | Type | Description | Default |\\n|---|---|---|---|\\n{section_body}\\n\"\n    )\n    section_body = \"\".join(\n        [\n            f'| `{section.arg_name}` | {symbol_annotations[\"parameters\"][section.arg_name][\"type\"]} | {section.description.replace(nl, \"\")} | {symbol_annotations[\"parameters\"][section.arg_name][\"default\"]} |\\n'  # type: ignore\n            if section.arg_name in symbol_annotations[\"parameters\"]\n            else \"\"\n            for section in section_items\n        ]\n    )\n    return f\"**{section_name}**:\\n\\n\" + _section_template.format(\n        section_body=section_body,\n    )\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 27\ndef _generate_return_and_raises_table(\n    symbol_annotations: Dict[str, Union[Dict[str, str], str]],\n    section_items: Union[List[DocstringReturns], List[DocstringRaises]],\n    section_name: str,\n) -> str:\n    \"\"\"Generate return and raises table in markdown format\n\n    Args:\n        symbol_annotations: Symbol annotations along with its default value\n        section_items: The parameter section of a parsed docstring\n        section_name: The name of the section\n\n    Returns:\n        The return and raises section of a symbol in markdown-formatted string\n    \"\"\"\n    nl = \"\\n\"\n    _section_template = \"|  Type | Description |\\n|---|---|\\n{section_body}\\n\"\n    section_body = \"\".join(\n        [\n            f'| `{symbol_annotations[\"return\"] if section_name == \"Returns\" else 
section.type_name}` | {section.description.replace(nl, \"\")} |\\n'  # type: ignore\n            for section in section_items\n        ]\n    )\n    return f\"**{section_name}**:\\n\\n\" + _section_template.format(\n        section_body=section_body,\n    )\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 30\ndef _format_docstring_section_items(\n    symbol_annotations: Dict[str, Union[Dict[str, str], str]],\n    section_items: Union[\n        List[DocstringParam], List[DocstringReturns], List[DocstringRaises]\n    ],\n    section_name: str,\n) -> str:\n    \"\"\"Format the docstring sections in a table format\n\n    Args:\n        symbol_annotations: Symbol annotations along with its default value\n        section_items: The parameter section of a parsed docstring\n        section_name: The name of the section\n\n    Returns:\n        The docstring sections of the symbol in markdown-formatted string\n    \"\"\"\n    if section_name == \"Parameters\":\n        return _generate_parameters_table(symbol_annotations, section_items, section_name)  # type: ignore\n    else:\n        return _generate_return_and_raises_table(symbol_annotations, section_items, section_name)  # type: ignore\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 35\ndef _get_annotation(symbol: Type) -> Dict[str, Union[Dict[str, Dict[str, str]], str]]:\n    \"\"\"Get annotations along with its default value for a symbol\n\n    Args:\n        symbol: The symbol for which the annotations needs to be extracted\n\n    Returns:\n        The annotations dict along with its default value\n    \"\"\"\n    symbol = symbol.fget if isinstance(symbol, property) else symbol\n    symbol_signature = signature(symbol)\n    params_dict = _get_params_annotation(symbol_signature)\n    return_annotation = _get_return_annotation(symbol_signature)\n    return {\"parameters\": params_dict, \"return\": return_annotation}\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 38\ndef _format_docstring_sections(symbol: Type, parsed_docstring: 
Docstring) -> str:\n    \"\"\"Format the parsed docstring sections into markdown-formatted table\n\n    Args:\n        symbol: The symbol for which to parse the docstring.\n        parsed_docstring: A Docstring object\n\n    Returns:\n        The markdown-formatted docstring.\n    \"\"\"\n    symbol_annotations = _get_annotation(symbol)\n    formatted_docstring = \"\"\n    sections = [\n        (\"Parameters\", parsed_docstring.params),\n        (\"Returns\", parsed_docstring.many_returns),\n        (\"Exceptions\", parsed_docstring.raises),\n    ]\n\n    for section_name, section_items in sections:\n        if len(section_items) > 0:  # type: ignore\n            formatted_docstring += _format_docstring_section_items(\n                symbol_annotations, section_items, section_name  # type: ignore\n            )\n\n    return formatted_docstring\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 40\ndef _format_free_links(s: str) -> str:\n    \"\"\"Format free links in a given string by adding proper spacing around them.\n\n    Args:\n        s: The input string containing free links.\n\n    Returns:\n        The modified string with properly formatted free links.\n    \"\"\"\n    pattern = r\"([\\\"'])(https?:\\/\\/[^\\s]+)([\\\"'])\"\n    ret_val = re.sub(\n        pattern, lambda match: f\"{match.group(1)} {match.group(2)} {match.group(3)}\", s\n    )\n    return ret_val\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 42\ndef _docstring_to_markdown(symbol: Type) -> str:\n    \"\"\"Converts a docstring to a markdown-formatted string.\n\n    Args:\n        symbol: The symbol for which the documentation needs to be generated in markdown format.\n\n    Returns:\n        The markdown-formatted docstring.\n    \"\"\"\n    if symbol.__doc__ is None:\n        return \"\"\n\n    parsed_docstring = parse(symbol.__doc__)\n    formatted_docstring = f\"{parsed_docstring.short_description}\\n\\n\"\n    formatted_docstring += (\n        f\"{parsed_docstring.long_description}\\n\\n\"\n      
  if parsed_docstring.long_description\n        else \"\"\n    )\n    formatted_docstring += _format_docstring_sections(symbol, parsed_docstring)\n    ret_val = _format_free_links(formatted_docstring)\n\n    return ret_val\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 47\ndef _get_submodules(module_name: str) -> List[str]:\n    \"\"\"Get a list of all submodules contained within the module.\n\n    Args:\n        module_name: The name of the module to retrieve submodules from\n\n    Returns:\n        A list of submodule names within the module\n    \"\"\"\n    members = _import_all_members(module_name)\n    members_with_submodules = _add_all_submodules(members)\n    members_with_submodules_str: List[str] = [\n        x[:-1] if x.endswith(\".\") else x for x in members_with_submodules\n    ]\n    return members_with_submodules_str\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 49\ndef _load_submodules(\n    module_name: str, members_with_submodules: List[str]\n) -> List[Type]:\n    \"\"\"Load the given submodules from the module.\n\n    Args:\n        module_name: The name of the module whose submodules to load\n        members_with_submodules: A list of submodule names to load\n\n    Returns:\n        A list of imported submodule objects.\n    \"\"\"\n    submodules = _import_submodules(module_name)\n    members: List[Tuple[str, Type]] = list(\n        itertools.chain(*[_import_functions_and_classes(m) for m in submodules])\n    )\n    names = [\n        y\n        for x, y in members\n        if f\"{y.__module__}.{y.__name__}\" in members_with_submodules\n    ]\n    return names\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 51\ndef _get_parameters(_signature: Signature) -> List[str]:\n    \"\"\"Convert a function's signature into a string representation of its parameter list.\n\n    Args:\n        _signature: The signature object representing the function's signature.\n\n    Returns:\n        A list of strings representing the function's parameters, including their default 
values if applicable.\n    \"\"\"\n    params = [param for param in _signature.parameters.values()]\n    ret_val = [\n        f\"{param.name}\"\n        if (param.default is param.empty)\n        else f\"{param.name}='{param.default}'\"\n        if isinstance(param.default, str)\n        else f\"{param.name}={param.default}\"\n        for param in params\n    ]\n    return ret_val\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 55\ndef _format_symbol_definition(symbol: Type, params_list: List[str]) -> str:\n    \"\"\"Format the given symbol parameters by adding a new line and indentation.\n\n    Args:\n        symbol: The symbol for which the symbol definition needs to be formatted.\n        params_list: A string representation of the parameter list.\n\n    Returns:\n        A formatted string representation of the parameters with new lines and indentation.\n    \"\"\"\n    parameters = \", \".join(params_list)\n    if parameters == \"\":\n        return f\"{symbol.__name__}()\\n\"\n    elif len(f\"{symbol.__name__}({parameters})\") <= 79:\n        return f\"{symbol.__name__}(\\n    {parameters}\\n)\\n\"\n    else:\n        formatted_parameters = \"\".join([f\"\\n    {param},\" for param in params_list])\n        return f\"{symbol.__name__}({formatted_parameters}\\n)\\n\"\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 62\ndef _get_exps(mod: str) -> Dict[str, str]:\n    mf = _find_mod(mod)\n    if not mf:\n        return {}\n    txt = mf.read_text()\n    _def_types = ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef\n    d = {}\n    for tree in ast.parse(txt).body:\n        if isinstance(tree, _def_types):\n            for t in L(patch_name(tree)):\n                d[t] = f\"{tree.lineno}-L{tree.end_lineno}\"\n        if isinstance(tree, ast.ClassDef):\n            d.update(\n                {\n                    tree.name + \".\" + t2.name: f\"{t2.lineno}-L{t2.end_lineno}\"\n                    for t2 in tree.body\n                    if isinstance(t2, _def_types)\n      
          }\n            )\n    return d\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 64\ndef _lineno(sym: str, fname: str) -> Optional[str]:\n    return _get_exps(fname).get(sym, None) if fname else None\n\n\n@lru_cache(None)\nclass CustomNbdevLookup(NbdevLookup.__wrapped__):  # type: ignore\n    def __init__(\n        self,\n        strip_libs: Optional[str] = None,\n        incl_libs: Optional[str] = None,\n        skip_mods: Optional[str] = None,\n    ):\n        super().__init__(strip_libs, incl_libs, skip_mods)\n\n    def code(self, sym: str) -> Optional[str]:\n        \"Link to source code for `sym`\"\n        res = self[sym]\n        if not isinstance(res, tuple):\n            return None\n        _, py, gh = res\n        line = _lineno(sym, py)\n        return f\"{gh}#L{line}\"\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 67\ndef _get_symbol_source_link(symbol: Type, lib_version: str) -> str:\n    \"\"\"Returns the source code link for a given symbol.\n\n    Args:\n        symbol: The symbol to get the source code link for.\n        lib_version: The current version of the library.\n\n    Returns:\n        The source code link for the symbol.\n    \"\"\"\n    symbol = symbol.fget if isinstance(symbol, property) else symbol\n    source_link = CustomNbdevLookup().code(f\"{symbol.__qualname__}\")\n\n    if source_link is None:\n        return \"\"\n\n    href = (\n        source_link.replace(\"/blob/main/\", f\"/blob/{lib_version}/\")\n        if lib_version.replace(\".\", \"\").isdigit()\n        else source_link\n    )\n    return f'<a href=\"{href}\" class=\"link-to-source\" target=\"_blank\">View source</a>'\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 71\ndef _get_method_type(symbol: Type) -> str:\n    try:\n        source = getsource(symbol).strip()\n    except (TypeError, OSError) as e:\n        return \"\"\n\n    first_line = source.split(\"\\n\")[0]\n    return (\n        f\"{first_line}\\n\"\n        if first_line\n        in [\"@abstractmethod\", 
\"@staticmethod\", \"@classmethod\", \"@property\"]\n        else \"\"\n    )\n\n\ndef _get_symbol_definition(symbol: Type, header_level: int, lib_version: str) -> str:\n    \"\"\"Return the definition of a given symbol.\n\n    Args:\n        symbol: A function or method object to get the definition for.\n        header_level: The level of the markdown header to append.\n        lib_version: The current version of the library.\n\n    Returns:\n        A string representing the function definition\n    \"\"\"\n    if isclass(symbol):\n        return f\"{'#'*(header_level - 1)} {symbol.__module__}.{symbol.__name__} {{#{symbol.__module__}.{symbol.__name__}}}\\n\\n{_get_symbol_source_link(symbol, lib_version)}\\n\\n\"\n\n    if isinstance(symbol, property):\n        symbol = symbol.fget\n\n    symbol_anchor = (\n        f\"{'#' * header_level} {symbol.__name__}\"\n        + f\" {{#{symbol.__module__}.{'.'.join([component.strip('_') for component in symbol.__qualname__.rsplit('.', 1)])}}}\\n\\n\"\n    )\n\n    link_to_source = f\"{_get_symbol_source_link(symbol, lib_version)}\\n\\n\"\n\n    _signature = signature(symbol)\n    parameters = _get_parameters(_signature)\n    symbol_definition = f\"```py\\n{_get_method_type(symbol)}{_format_symbol_definition(symbol, parameters)}```\\n\"\n    return symbol_anchor + link_to_source + symbol_definition\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 77\ndef _is_method(symbol: Type) -> bool:\n    \"\"\"Check if the given symbol is a method.\n\n    Args:\n        symbol: A function or method object to check.\n\n    Returns:\n        A boolean indicating whether the symbol is a method.\n    \"\"\"\n    return ismethod(symbol) or isfunction(symbol) or isinstance(symbol, property)\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 79\ndef _get_formatted_docstring_for_symbol(\n    symbol: Type, lib_version: str, header_level: int = 2\n) -> str:\n    \"\"\"Recursively parses and get formatted docstring of a symbol.\n\n    Args:\n        symbol: A 
Python class or function object to parse the docstring for.\n        lib_version: The current version of the library.\n        header_level: The level of the markdown header to append.\n\n    Returns:\n        A formatted docstring of the symbol and its members.\n\n    \"\"\"\n\n    def traverse(\n        symbol: Type, contents: str, header_level: int, lib_version: str\n    ) -> str:\n        \"\"\"Recursively traverse the members of a symbol and append their docstrings to the provided contents string.\n\n        Args:\n            symbol: A Python class or function object to parse the docstring for.\n            contents: The current formatted docstrings.\n            header_level: The level of the markdown header to append.\n            lib_version: The current version of the library.\n\n        Returns:\n            The updated formatted docstrings.\n\n        \"\"\"\n        for x, y in getmembers(symbol):\n            if not x.startswith(\"_\") or x == \"__init__\":\n                if _is_method(y):\n                    contents += f\"{_get_symbol_definition(y, header_level, lib_version)}\\n{_docstring_to_markdown(y)}\"\n                elif isclass(y) and not x.startswith(\"_\"):\n                    contents += f\"{_get_symbol_definition(y, header_level+1, lib_version)}\\n{_docstring_to_markdown(y)}\"\n                    contents = traverse(y, contents, header_level + 1, lib_version)\n        return contents\n\n    contents = f\"{_get_symbol_definition(symbol, header_level+1, lib_version)}\\n{_docstring_to_markdown(symbol)}\"\n    if isclass(symbol):\n        contents = traverse(symbol, contents, header_level + 1, lib_version)\n    return contents\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 84\ndef _convert_html_style_attribute_to_jsx(contents: str) -> str:\n    \"\"\"Converts the inline style attributes in an HTML string to JSX compatible format.\n\n    Args:\n        contents: A string containing an HTML document or fragment.\n\n    Returns:\n        A 
string with inline style attributes converted to JSX compatible format.\n    \"\"\"\n    style_regex = re.compile(r'style=\"(.+?)\"')\n    style_matches = style_regex.findall(contents)\n\n    for style_match in style_matches:\n        style_dict = {}\n        styles = style_match.split(\";\")\n        for style in styles:\n            key_value = style.split(\":\")\n            if len(key_value) == 2:\n                key = re.sub(\n                    r\"-(.)\", lambda m: m.group(1).upper(), key_value[0].strip()\n                )\n                value = key_value[1].strip().replace(\"'\", '\"')\n                style_dict[key] = value\n        replacement = \"style={{\"\n        for key, value in style_dict.items():\n            replacement += f\"{key}: '{value}', \"\n        replacement = replacement[:-2] + \"}}\"\n        contents = contents.replace(f'style=\"{style_match}\"', replacement)\n\n    return contents\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 86\ndef _get_all_markdown_files_path(docs_path: Path) -> List[Path]:\n    \"\"\"Get all Markdown files in a directory and its subdirectories.\n\n    Args:\n        directory: The path to the directory to search in.\n\n    Returns:\n        A list of paths to all Markdown files found in the directory and its subdirectories.\n    \"\"\"\n    markdown_files = [file_path for file_path in docs_path.glob(\"**/*.md\")]\n    return markdown_files\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 88\ndef _fix_special_symbols_in_html(contents: str) -> str:\n    contents = contents.replace(\"”\", '\"')\n    return contents\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 90\ndef _add_file_extension_to_link(url: str) -> str:\n    \"\"\"Add file extension to the last segment of a URL\n\n    Args:\n        url: A URL string.\n\n    Returns:\n        A string of the updated URL with a file extension added to the last segment of the URL.\n    \"\"\"\n    segments = url.split(\"/#\")[0].split(\"/\")[-2:]\n    return 
url.replace(f\"/{segments[1]}\", f\"/{segments[1]}.md\").replace(\".md/#\", \".md#\")\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 94\ndef _generate_production_url(url: str) -> str:\n    \"\"\"Generate a Docusaurus compatible production URL for the given symbol URL.\n\n    Args:\n        url: The symbol URL to be converted.\n\n    Returns:\n        The production URL of the symbol.\n    \"\"\"\n    url_segment, hash_segment = url.split(\".md\")\n    url_split = url_segment.split(\"/\")\n    if url_split[-1].lower() == url_split[-2].lower():\n        return \"/\".join(url_split[:-1]) + hash_segment\n    return url.replace(\".md\", \"\")\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 96\ndef _fix_symbol_links(\n    contents: str,\n    dir_prefix: str,\n    doc_host: str,\n    doc_baseurl: str,\n    use_relative_doc_links: bool = True,\n) -> str:\n    \"\"\"Fix symbol links in Markdown content.\n\n    Args:\n        contents: The Markdown content to search for symbol links.\n        dir_prefix: Directory prefix to append in the relative URL.\n        doc_host: The host URL for the documentation site.\n        doc_baseurl: The base URL for the documentation site.\n        use_relative_doc_links: If set to True, then the relative link to symbols will be added else,\n            production link will be added.\n\n    Returns:\n        str: The Markdown content with updated symbol links.\n    \"\"\"\n    prefix = re.escape(urljoin(doc_host + \"/\", doc_baseurl))\n    pattern = re.compile(rf\"\\[(.*?)\\]\\(({prefix}[^)]+)\\)\")\n    matches = pattern.findall(contents)\n    for match in matches:\n        old_url = match[1]\n        new_url = _add_file_extension_to_link(old_url).replace(\"/api/\", \"/docs/api/\")\n        if use_relative_doc_links:\n            dir_prefix = \"./\" if dir_prefix == \"\" else dir_prefix\n            updated_url = dir_prefix + new_url.split(\"/docs/\")[1]\n        else:\n            updated_url = _generate_production_url(\n                doc_host + 
doc_baseurl + \"/docs/\" + new_url.split(\"/docs/\")[1]\n            )\n        contents = contents.replace(old_url, updated_url)\n    return contents\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 104\ndef _get_relative_url_prefix(docs_path: Path, sub_path: Path) -> str:\n    \"\"\"Returns a relative url prefix from a sub path to a docs path.\n\n    Args:\n        docs_path (Path): The docs directory path.\n        sub_path (Path): The sub directory path.\n\n    Returns:\n        str: A string representing the relative path from the sub path to the docs path.\n\n    Raises:\n        ValueError: If the sub path is not a descendant of the docs path.\n    \"\"\"\n    try:\n        relative_path = sub_path.relative_to(docs_path)\n    except ValueError:\n        raise ValueError(f\"{sub_path} is not a descendant of {docs_path}\")\n\n    return (\n        \"../\" * (len(relative_path.parts) - 1) if len(relative_path.parts) > 1 else \"\"\n    )\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 106\ndef fix_invalid_syntax_in_markdown(docs_path: str) -> None:\n    \"\"\"Fix invalid HTML syntax in markdown files and converts inline style attributes to JSX-compatible format.\n\n    Args:\n        docs_path: The path to the root directory to search for markdown files.\n    \"\"\"\n    cfg = get_config()\n    doc_host = cfg[\"doc_host\"]\n    doc_baseurl = cfg[\"doc_baseurl\"]\n\n    markdown_files = _get_all_markdown_files_path(Path(docs_path))\n    for file in markdown_files:\n        relative_url_prefix = _get_relative_url_prefix(Path(docs_path), file)\n        contents = Path(file).read_text()\n\n        contents = _convert_html_style_attribute_to_jsx(contents)\n        contents = _fix_special_symbols_in_html(contents)\n        contents = _fix_symbol_links(\n            contents, relative_url_prefix, doc_host, doc_baseurl\n        )\n        file.write_text(contents)\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 108\ndef generate_markdown_docs(module_name: str, docs_path: str) -> 
None:\n    \"\"\"Generates Markdown documentation files for the symbols in the given module and save them to the given directory.\n\n    Args:\n        module_name: The name of the module to generate documentation for.\n        docs_path: The path to the directory where the documentation files will be saved.\n    \"\"\"\n    members_with_submodules = _get_submodules(module_name)\n    symbols = _load_submodules(module_name, members_with_submodules)\n    lib_version = get_config()[\"version\"]\n\n    for symbol in symbols:\n        content = _get_formatted_docstring_for_symbol(symbol, lib_version)\n        target_file_path = (\n            \"/\".join(f\"{symbol.__module__}.{symbol.__name__}\".split(\".\")) + \".md\"\n        )\n        with open((Path(docs_path) / \"api\" / target_file_path), \"w\") as f:\n            f.write(content)\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 111\ndef _parse_lines(lines: List[str]) -> Tuple[List[str], int]:\n    \"\"\"Parse a list of lines and return a tuple containing a list of filenames and an index indicating how many lines to skip.\n\n    Args:\n        lines: A list of strings representing lines of input text.\n\n    Returns:\n        A tuple containing a list of strings representing the filenames extracted\n        from links in the lines and an integer representing the number of lines to skip.\n    \"\"\"\n    index = next(\n        (i for i, line in enumerate(lines) if not line.strip().startswith(\"- [\")),\n        len(lines),\n    )\n    return [line.split(\"(\")[1][:-4] for line in lines[:index]], index\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 114\ndef _parse_section(text: str, ignore_first_line: bool = False) -> List[Any]:\n    \"\"\"Parse the given section contents and return a list of file names in the expected format.\n\n    Args:\n        text: A string representing the contents of a file.\n        ignore_first_line: Flag indicating whether to ignore the first line extracting the section contents.\n\n    Returns:\n 
       A list of filenames in the expected format\n    \"\"\"\n    pattern = r\"\\[.*?\\]\\((.*?)\\)|\\[(.*?)\\]\\[(.*?)\\]\"\n    lines = text.split(\"\\n\")[1:] if ignore_first_line else text.split(\"\\n\")\n    ret_val = []\n    index = 0\n    while index < len(lines):\n        line = lines[index]\n        match = re.search(pattern, line.strip())\n        if match is not None:\n            ret_val.append(match.group(1).split(\".md\")[0])\n            index += 1\n        elif line.strip() != \"\":\n            value, skip_lines = _parse_lines(lines[index + 1 :])\n            ret_val.append({line.replace(\"-\", \"\").strip(): value})\n            index += skip_lines + 1\n        else:\n            index += 1\n    return ret_val\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 117\ndef _get_section_from_markdown(\n    markdown_text: str, section_header: str\n) -> Optional[str]:\n    \"\"\"Get the contents of the section header from the given markdown text\n\n    Args:\n        markdown_text: A string containing the markdown text to extract the section from.\n        section_header: A string representing the header of the section to extract.\n\n    Returns:\n        A string representing the contents of the section header if the section header\n        is present in the markdown text, else None\n    \"\"\"\n    pattern = re.compile(rf\"^- {section_header}\\n((?:\\s+- .*\\n)+)\", re.M)\n    match = pattern.search(markdown_text)\n    return match.group(1) if match else None\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 122\ndef generate_sidebar(\n    summary_file: str = \"./docusaurus/docs/SUMMARY.md\",\n    summary: str = \"\",\n    target: str = \"./docusaurus/sidebars.js\",\n) -> None:\n    \"\"\"\n    Generate a sidebar js file for a Docusaurus documentation site based on a SUMMARY.md file.\n\n    Args:\n        summary_file: The path to the SUMMARY.md file containing the documentation structure.\n            Default is \"./docusaurus/docs/SUMMARY.md\".\n        summary: 
An optional summary string.\n            Default is an empty string.\n        target: The path to the target sidebar js file to be generated.\n            Default is \"./docusaurus/sidebars.js\".\n\n    Returns:\n        None: The function does not return any value directly, but it generates a sidebar file.\n\n    Raises:\n        FileNotFoundError: If the specified `summary_file` does not exist.\n    \"\"\"\n    with open(summary_file, \"r\") as stream, open(target, \"w\") as target_stream:\n        summary_contents = stream.read()\n\n        guides_summary = _get_section_from_markdown(summary_contents, \"Guides\")\n        parsed_guides = _parse_section(guides_summary)  # type: ignore\n\n        api_summary = _get_section_from_markdown(summary_contents, \"API\")\n        parsed_api = _parse_section(api_summary, True)  # type: ignore\n\n        cli_summary = _get_section_from_markdown(summary_contents, \"CLI\")\n        parsed_cli = _parse_section(cli_summary)  # type: ignore\n\n        target_stream.write(\n            \"\"\"module.exports = {\ntutorialSidebar: [\n    'index', {'Guides': \n    \"\"\"\n            + str(parsed_guides)\n            + \"},\"\n            + \"{'API': [\"\n            + str(parsed_api)[1:-1]\n            + \"]},\"\n            + \"{'CLI': \"\n            + str(parsed_cli)\n            + \"},\"\n            + \"\"\"\n    \"LICENSE\",\n    \"CONTRIBUTING\",\n    \"CHANGELOG\",\n],\n};\"\"\"\n        )\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 124\ndef _get_markdown_filenames_from_sidebar(sidebar_file_path: str) -> List[str]:\n    \"\"\"Get a list of Markdown filenames included in the sidebar.\n\n    Args:\n        sidebar_file_path: The path to the sidebar file.\n\n    Returns:\n        A list of Markdown filenames included in the sidebar.\n    \"\"\"\n    with open(sidebar_file_path, \"r\") as file:\n        file_content = file.read()\n\n        pattern = r\"tutorialSidebar:\\s*(\\[.*\\])\\s*,\\s*\\n?\\s*};\"\n        match = 
re.search(pattern, file_content, re.DOTALL)\n        all_sidebar_files = ast.literal_eval(match.group(1)) if match else []\n        markdown_filenames = [\n            f\"{v}.md\" for v in all_sidebar_files if isinstance(v, str)\n        ]\n        return markdown_filenames\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 126\ndef _delete_files(files: List[Path]) -> None:\n    \"\"\"Deletes a list of files.\n\n    Args:\n        files: A list of Path objects representing the files to be deleted.\n\n    Raises:\n        OSError: If an error occurs while deleting a file.\n\n    \"\"\"\n    for file in files:\n        try:\n            file.unlink()\n        except OSError as e:\n            typer.echo(\n                f\"Error deleting files from docusaurus/docs directory. Could not delete file: {file} - {e}\"\n            )\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 129\ndef delete_unused_markdown_files_from_sidebar(\n    docs_path: str, sidebar_file_path: str\n) -> None:\n    \"\"\"Delete the markdown files from the docs directory that are not present in the sidebar.\n\n    Args:\n        docs_path: Path to the directory containing the markdown files.\n        sidebar_file_path: Path to the sidebar file.\n    \"\"\"\n    md_filenames_in_sidebar = _get_markdown_filenames_from_sidebar(\n        str(sidebar_file_path)\n    )\n    if len(md_filenames_in_sidebar) > 0:\n        all_md_files_in_docs_dir = [\n            file_path for file_path in Path(docs_path).glob(\"*.md\")\n        ]\n        md_files_in_sidebar = [Path(docs_path) / f for f in md_filenames_in_sidebar]\n        md_files_to_delete = list(\n            set(all_md_files_in_docs_dir) - set(md_files_in_sidebar)\n        )\n        _delete_files(md_files_to_delete)\n\n# %% ../nbs/096_Docusaurus_Helper.ipynb 131\ndef update_readme() -> None:\n    \"\"\"Update the readme file and fix the symbol links\"\"\"\n    cfg = get_config()\n    readme_path = cfg.config_path / \"README.md\"\n    nbdev_readme.__wrapped__()\n\n 
   with open(readme_path, \"r\", encoding=\"utf-8\") as f:\n        contents = f.read()\n\n    contents = update_default_symbol_links(\n        contents, NbdevLookup(incl_libs=cfg.lib_path.name), \"\", \"\", False\n    )\n    contents = _fix_symbol_links(contents, \"./\", cfg.doc_host, cfg.doc_baseurl, False)\n\n    with open(readme_path, \"w\", encoding=\"utf-8\") as f:\n        f.write(contents)\n"
  },
  {
    "path": "fastkafka/_helpers.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/999_Helpers.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'aiokafka2confluent', 'confluent2aiokafka', 'produce_messages', 'consumes_messages',\n           'produce_and_consume_messages', 'get_collapsible_admonition', 'source2markdown', 'wait_for_get_url']\n\n# %% ../nbs/999_Helpers.ipynb 2\nimport asyncio\nimport inspect\nimport json\nimport textwrap\nfrom datetime import datetime, timedelta\nfrom typing import *\n\nimport aiohttp\nimport anyio\nfrom ._aiokafka_imports import AIOKafkaProducer, AIOKafkaConsumer\nfrom aiokafka.helpers import create_ssl_context\nfrom aiokafka.structs import RecordMetadata\nfrom IPython.display import Markdown\n\nfrom ._components.helpers import in_notebook\nfrom ._components.logger import get_logger\nfrom ._components.meta import delegates\n\n# %% ../nbs/999_Helpers.ipynb 4\nif in_notebook():\n    from tqdm.notebook import tqdm\nelse:\n    from tqdm import tqdm\n\n# %% ../nbs/999_Helpers.ipynb 6\nlogger = get_logger(__name__)\n\n# %% ../nbs/999_Helpers.ipynb 9\n@delegates(AIOKafkaProducer)\ndef aiokafka2confluent(**kwargs: Dict[str, Any]) -> Dict[str, Any]:\n    \"\"\"Converts AIOKafka styled config dictionary into Confluence styled one\n\n    Returns (Dict[str, Any]):\n        Confluence styled config dictionary\n\n    Args:\n        bootstrap_servers (str, list(str)): a ``host[:port]`` string or list of\n            ``host[:port]`` strings that the producer should contact to\n            bootstrap initial cluster metadata. This does not have to be the\n            full node list.  It just needs to have at least one broker that will\n            respond to a Metadata API Request. Default port is 9092. If no\n            servers are specified, will default to ``localhost:9092``.\n        client_id (str): a name for this client. 
This string is passed in\n            each request to servers and can be used to identify specific\n            server-side log entries that correspond to this client.\n            Default: ``aiokafka-producer-#`` (appended with a unique number\n            per instance)\n        key_serializer (Callable): used to convert user-supplied keys to bytes\n            If not :data:`None`, called as ``f(key),`` should return\n            :class:`bytes`.\n            Default: :data:`None`.\n        value_serializer (Callable): used to convert user-supplied message\n            values to :class:`bytes`. If not :data:`None`, called as\n            ``f(value)``, should return :class:`bytes`.\n            Default: :data:`None`.\n        acks (Any): one of ``0``, ``1``, ``all``. The number of acknowledgments\n            the producer requires the leader to have received before considering a\n            request complete. This controls the durability of records that are\n            sent. The following settings are common:\n\n            * ``0``: Producer will not wait for any acknowledgment from the server\n              at all. The message will immediately be added to the socket\n              buffer and considered sent. No guarantee can be made that the\n              server has received the record in this case, and the retries\n              configuration will not take effect (as the client won't\n              generally know of any failures). The offset given back for each\n              record will always be set to -1.\n            * ``1``: The broker leader will write the record to its local log but\n              will respond without awaiting full acknowledgement from all\n              followers. 
In this case should the leader fail immediately\n              after acknowledging the record but before the followers have\n              replicated it then the record will be lost.\n            * ``all``: The broker leader will wait for the full set of in-sync\n              replicas to acknowledge the record. This guarantees that the\n              record will not be lost as long as at least one in-sync replica\n              remains alive. This is the strongest available guarantee.\n\n            If unset, defaults to ``acks=1``. If `enable_idempotence` is\n            :data:`True` defaults to ``acks=all``\n        compression_type (str): The compression type for all data generated by\n            the producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\n            or :data:`None`.\n            Compression is of full batches of data, so the efficacy of batching\n            will also impact the compression ratio (more batching means better\n            compression). Default: :data:`None`.\n        max_batch_size (int): Maximum size of buffered data per partition.\n            After this amount :meth:`send` coroutine will block until batch is\n            drained.\n            Default: 16384\n        linger_ms (int): The producer groups together any records that arrive\n            in between request transmissions into a single batched request.\n            Normally this occurs only under load when records arrive faster\n            than they can be sent out. However in some circumstances the client\n            may want to reduce the number of requests even under moderate load.\n            This setting accomplishes this by adding a small amount of\n            artificial delay; that is, if first request is processed faster,\n            than `linger_ms`, producer will wait ``linger_ms - process_time``.\n            Default: 0 (i.e. 
no delay).\n        partitioner (Callable): Callable used to determine which partition\n            each message is assigned to. Called (after key serialization):\n            ``partitioner(key_bytes, all_partitions, available_partitions)``.\n            The default partitioner implementation hashes each non-None key\n            using the same murmur2 algorithm as the Java client so that\n            messages with the same key are assigned to the same partition.\n            When a key is :data:`None`, the message is delivered to a random partition\n            (filtered to partitions with available leaders only, if possible).\n        max_request_size (int): The maximum size of a request. This is also\n            effectively a cap on the maximum record size. Note that the server\n            has its own cap on record size which may be different from this.\n            This setting will limit the number of record batches the producer\n            will send in a single request to avoid sending huge requests.\n            Default: 1048576.\n        metadata_max_age_ms (int): The period of time in milliseconds after\n            which we force a refresh of metadata even if we haven't seen any\n            partition leadership changes to proactively discover any new\n            brokers or partitions. Default: 300000\n        request_timeout_ms (int): Produce request timeout in milliseconds.\n            As it's sent as part of\n            :class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\n            call), maximum waiting time can be up to ``2 *\n            request_timeout_ms``.\n            Default: 40000.\n        retry_backoff_ms (int): Milliseconds to backoff when retrying on\n            errors. Default: 100.\n        api_version (str): specify which kafka API version to use.\n            If set to ``auto``, will attempt to infer the broker version by\n            probing various APIs. 
Default: ``auto``\n        security_protocol (str): Protocol used to communicate with brokers.\n            Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n            Default: ``PLAINTEXT``.\n        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\n            for wrapping socket connections. Directly passed into asyncio's\n            :meth:`~asyncio.loop.create_connection`. For more\n            information see :ref:`ssl_auth`.\n            Default: :data:`None`\n        connections_max_idle_ms (int): Close idle connections after the number\n            of milliseconds specified by this config. Specifying :data:`None` will\n            disable idle checks. Default: 540000 (9 minutes).\n        enable_idempotence (bool): When set to :data:`True`, the producer will\n            ensure that exactly one copy of each message is written in the\n            stream. If :data:`False`, producer retries due to broker failures,\n            etc., may write duplicates of the retried message in the stream.\n            Note that enabling idempotence acks to set to ``all``. If it is not\n            explicitly set by the user it will be chosen. If incompatible\n            values are set, a :exc:`ValueError` will be thrown.\n            New in version 0.5.0.\n        sasl_mechanism (str): Authentication mechanism when security_protocol\n            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\n            are: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n            ``OAUTHBEARER``.\n            Default: ``PLAIN``\n        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\n            Default: :data:`None`\n        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\n            Default: :data:`None`\n        sasl_oauth_token_provider (: class:`~aiokafka.abc.AbstractTokenProvider`):\n            OAuthBearer token provider instance. 
(See\n            :mod:`kafka.oauth.abstract`).\n            Default: :data:`None`\n    \"\"\"\n    confluent_config = {k.replace(\"_\", \".\"): v for k, v in kwargs.items()}\n    for k1, k2 in zip(\n        [\"sasl.plain.username\", \"sasl.plain.password\"],\n        [\"sasl.username\", \"sasl.password\"],\n    ):\n        if k1 in confluent_config:\n            confluent_config[k2] = confluent_config.pop(k1)\n\n    if \"ssl.context\" in confluent_config:\n        confluent_config.pop(\"ssl.context\")\n\n    return confluent_config\n\n# %% ../nbs/999_Helpers.ipynb 11\ndef confluent2aiokafka(confluent_config: Dict[str, Any]) -> Dict[str, Any]:\n    \"\"\"Converts AIOKafka styled config dictionary into Confluence styled one\n\n    Args:\n        confluent_config: Confluence styled config dictionary\n\n    Returns:\n        AIOKafka styled config dictionary\n    \"\"\"\n\n    aiokafka_config = {k.replace(\".\", \"_\"): v for k, v in confluent_config.items()}\n    for k1, k2 in zip(\n        [\"sasl_username\", \"sasl_password\"],\n        [\"sasl_plain_username\", \"sasl_plain_password\"],\n    ):\n        if k1 in aiokafka_config:\n            aiokafka_config[k2] = aiokafka_config.pop(k1)\n\n    if \"sasl_plain_username\" in aiokafka_config:\n        aiokafka_config[\"ssl.context\"] = (create_ssl_context(),)\n\n    return aiokafka_config\n\n# %% ../nbs/999_Helpers.ipynb 14\n@delegates(AIOKafkaProducer)\nasync def produce_messages(  # type: ignore\n    *,\n    topic: str,\n    msgs: List[Any],\n    **kwargs: Dict[str, Any],\n) -> List[RecordMetadata]:\n    \"\"\"Produces messages to Kafka topic\n\n    Args:\n        topic: Topic name\n        msgs: a list of messages to produce\n        bootstrap_servers (str, list(str)): a ``host[:port]`` string or list of\n            ``host[:port]`` strings that the producer should contact to\n            bootstrap initial cluster metadata. This does not have to be the\n            full node list.  
It just needs to have at least one broker that will\n            respond to a Metadata API Request. Default port is 9092. If no\n            servers are specified, will default to ``localhost:9092``.\n        client_id (str): a name for this client. This string is passed in\n            each request to servers and can be used to identify specific\n            server-side log entries that correspond to this client.\n            Default: ``aiokafka-producer-#`` (appended with a unique number\n            per instance)\n        key_serializer (Callable): used to convert user-supplied keys to bytes\n            If not :data:`None`, called as ``f(key),`` should return\n            :class:`bytes`.\n            Default: :data:`None`.\n        value_serializer (Callable): used to convert user-supplied message\n            values to :class:`bytes`. If not :data:`None`, called as\n            ``f(value)``, should return :class:`bytes`.\n            Default: :data:`None`.\n        acks (Any): one of ``0``, ``1``, ``all``. The number of acknowledgments\n            the producer requires the leader to have received before considering a\n            request complete. This controls the durability of records that are\n            sent. The following settings are common:\n\n            * ``0``: Producer will not wait for any acknowledgment from the server\n              at all. The message will immediately be added to the socket\n              buffer and considered sent. No guarantee can be made that the\n              server has received the record in this case, and the retries\n              configuration will not take effect (as the client won't\n              generally know of any failures). The offset given back for each\n              record will always be set to -1.\n            * ``1``: The broker leader will write the record to its local log but\n              will respond without awaiting full acknowledgement from all\n              followers. 
In this case should the leader fail immediately\n              after acknowledging the record but before the followers have\n              replicated it then the record will be lost.\n            * ``all``: The broker leader will wait for the full set of in-sync\n              replicas to acknowledge the record. This guarantees that the\n              record will not be lost as long as at least one in-sync replica\n              remains alive. This is the strongest available guarantee.\n\n            If unset, defaults to ``acks=1``. If `enable_idempotence` is\n            :data:`True` defaults to ``acks=all``\n        compression_type (str): The compression type for all data generated by\n            the producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\n            or :data:`None`.\n            Compression is of full batches of data, so the efficacy of batching\n            will also impact the compression ratio (more batching means better\n            compression). Default: :data:`None`.\n        max_batch_size (int): Maximum size of buffered data per partition.\n            After this amount :meth:`send` coroutine will block until batch is\n            drained.\n            Default: 16384\n        linger_ms (int): The producer groups together any records that arrive\n            in between request transmissions into a single batched request.\n            Normally this occurs only under load when records arrive faster\n            than they can be sent out. However in some circumstances the client\n            may want to reduce the number of requests even under moderate load.\n            This setting accomplishes this by adding a small amount of\n            artificial delay; that is, if first request is processed faster,\n            than `linger_ms`, producer will wait ``linger_ms - process_time``.\n            Default: 0 (i.e. 
no delay).\n        partitioner (Callable): Callable used to determine which partition\n            each message is assigned to. Called (after key serialization):\n            ``partitioner(key_bytes, all_partitions, available_partitions)``.\n            The default partitioner implementation hashes each non-None key\n            using the same murmur2 algorithm as the Java client so that\n            messages with the same key are assigned to the same partition.\n            When a key is :data:`None`, the message is delivered to a random partition\n            (filtered to partitions with available leaders only, if possible).\n        max_request_size (int): The maximum size of a request. This is also\n            effectively a cap on the maximum record size. Note that the server\n            has its own cap on record size which may be different from this.\n            This setting will limit the number of record batches the producer\n            will send in a single request to avoid sending huge requests.\n            Default: 1048576.\n        metadata_max_age_ms (int): The period of time in milliseconds after\n            which we force a refresh of metadata even if we haven't seen any\n            partition leadership changes to proactively discover any new\n            brokers or partitions. Default: 300000\n        request_timeout_ms (int): Produce request timeout in milliseconds.\n            As it's sent as part of\n            :class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\n            call), maximum waiting time can be up to ``2 *\n            request_timeout_ms``.\n            Default: 40000.\n        retry_backoff_ms (int): Milliseconds to backoff when retrying on\n            errors. Default: 100.\n        api_version (str): specify which kafka API version to use.\n            If set to ``auto``, will attempt to infer the broker version by\n            probing various APIs. 
Default: ``auto``\n        security_protocol (str): Protocol used to communicate with brokers.\n            Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\n            for wrapping socket connections. Directly passed into asyncio's\n            :meth:`~asyncio.loop.create_connection`. For more\n            information see :ref:`ssl_auth`.\n            Default: :data:`None`\n        connections_max_idle_ms (int): Close idle connections after the number\n            of milliseconds specified by this config. Specifying :data:`None` will\n            disable idle checks. Default: 540000 (9 minutes).\n        enable_idempotence (bool): When set to :data:`True`, the producer will\n            ensure that exactly one copy of each message is written in the\n            stream. If :data:`False`, producer retries due to broker failures,\n            etc., may write duplicates of the retried message in the stream.\n            Note that enabling idempotence requires ``acks`` to be set to\n            ``all``. If it is not\n            explicitly set by the user it will be chosen. If incompatible\n            values are set, a :exc:`ValueError` will be thrown.\n            New in version 0.5.0.\n        sasl_mechanism (str): Authentication mechanism when security_protocol\n            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\n            are: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n            ``OAUTHBEARER``.\n            Default: ``PLAIN``\n        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\n            Default: :data:`None`\n        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\n            Default: :data:`None`\n        sasl_oauth_token_provider (:class:`~aiokafka.abc.AbstractTokenProvider`):\n            OAuthBearer token provider instance. 
(See\n            :mod:`kafka.oauth.abstract`).\n            Default: :data:`None`\n    \"\"\"\n    p = AIOKafkaProducer(**kwargs)\n    await p.start()\n\n    try:\n\n        def prepare_msg(msg: Any) -> bytes:\n            if isinstance(msg, bytes):\n                return msg\n            elif isinstance(msg, str):\n                return msg.encode(\"utf-8\")\n            elif hasattr(msg, \"json\"):\n                return msg.json().encode(\"utf-8\")  # type: ignore\n            return json.dumps(msg).encode(\"utf-8\")\n\n        fx = [\n            await p.send(topic, prepare_msg(msg))\n            for msg in tqdm(msgs, desc=f\"producing to '{topic}'\")\n        ]\n        delivery = [await f for f in fx]\n        return delivery\n    finally:\n        await p.stop()\n\n# %% ../nbs/999_Helpers.ipynb 17\n@delegates(AIOKafkaConsumer)\nasync def consumes_messages(\n    *,\n    topic: str,\n    msgs_count: int,\n    **kwargs: Dict[str, Any],\n) -> None:\n    \"\"\"Consumes messages\n    Args:\n        topic: Topic name\n        msgs_count: number of messages to consume before returning\n        *topics (list(str)): optional list of topics to subscribe to. If not set,\n            call :meth:`.subscribe` or :meth:`.assign` before consuming records.\n            Passing topics directly is same as calling :meth:`.subscribe` API.\n        bootstrap_servers (str, list(str)): a ``host[:port]`` string (or list of\n            ``host[:port]`` strings) that the consumer should contact to bootstrap\n            initial cluster metadata.\n\n            This does not have to be the full node list.\n            It just needs to have at least one broker that will respond to a\n            Metadata API Request. Default port is 9092. If no servers are\n            specified, will default to ``localhost:9092``.\n        client_id (str): a name for this client. 
This string is passed in\n            each request to servers and can be used to identify specific\n            server-side log entries that correspond to this client. Also\n            submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\n            for logging with respect to consumer group administration. Default:\n            ``aiokafka-{version}``\n        group_id (str or None): name of the consumer group to join for dynamic\n            partition assignment (if enabled), and to use for fetching and\n            committing offsets. If None, auto-partition assignment (via\n            group coordinator) and offset commits are disabled.\n            Default: None\n        key_deserializer (Callable): Any callable that takes a\n            raw message key and returns a deserialized key.\n        value_deserializer (Callable, Optional): Any callable that takes a\n            raw message value and returns a deserialized value.\n        fetch_min_bytes (int): Minimum amount of data the server should\n            return for a fetch request, otherwise wait up to\n            `fetch_max_wait_ms` for more data to accumulate. Default: 1.\n        fetch_max_bytes (int): The maximum amount of data the server should\n            return for a fetch request. This is not an absolute maximum, if\n            the first message in the first non-empty partition of the fetch\n            is larger than this value, the message will still be returned\n            to ensure that the consumer can make progress. NOTE: consumer\n            performs fetches to multiple brokers in parallel so memory\n            usage will depend on the number of brokers containing\n            partitions for the topic.\n            Supported Kafka version >= 0.10.1.0. 
Default: 52428800 (50 Mb).\n        fetch_max_wait_ms (int): The maximum amount of time in milliseconds\n            the server will block before answering the fetch request if\n            there isn't sufficient data to immediately satisfy the\n            requirement given by fetch_min_bytes. Default: 500.\n        max_partition_fetch_bytes (int): The maximum amount of data\n            per-partition the server will return. The maximum total memory\n            used for a request ``= #partitions * max_partition_fetch_bytes``.\n            This size must be at least as large as the maximum message size\n            the server allows or else it is possible for the producer to\n            send messages larger than the consumer can fetch. If that\n            happens, the consumer can get stuck trying to fetch a large\n            message on a certain partition. Default: 1048576.\n        max_poll_records (int): The maximum number of records returned in a\n            single call to :meth:`.getmany`. Defaults ``None``, no limit.\n        request_timeout_ms (int): Client request timeout in milliseconds.\n            Default: 40000.\n        retry_backoff_ms (int): Milliseconds to backoff when retrying on\n            errors. Default: 100.\n        auto_offset_reset (str): A policy for resetting offsets on\n            :exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\n            available message, ``latest`` will move to the most recent, and\n            ``none`` will raise an exception so you can handle this case.\n            Default: ``latest``.\n        enable_auto_commit (bool): If true the consumer's offset will be\n            periodically committed in the background. Default: True.\n        auto_commit_interval_ms (int): milliseconds between automatic\n            offset commits, if enable_auto_commit is True. Default: 5000.\n        check_crcs (bool): Automatically check the CRC32 of the records\n            consumed. 
This ensures no on-the-wire or on-disk corruption to\n            the messages occurred. This check adds some overhead, so it may\n            be disabled in cases seeking extreme performance. Default: True\n        metadata_max_age_ms (int): The period of time in milliseconds after\n            which we force a refresh of metadata even if we haven't seen any\n            partition leadership changes to proactively discover any new\n            brokers or partitions. Default: 300000\n        partition_assignment_strategy (list): List of objects to use to\n            distribute partition ownership amongst consumer instances when\n            group management is used. This preference is implicit in the order\n            of the strategies in the list. When assignment strategy changes:\n            to support a change to the assignment strategy, new versions must\n            enable support both for the old assignment strategy and the new\n            one. The coordinator will choose the old assignment strategy until\n            all members have been updated. Then it will choose the new\n            strategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n        max_poll_interval_ms (int): Maximum allowed time between calls to\n            consume messages (e.g., :meth:`.getmany`). If this interval\n            is exceeded the consumer is considered failed and the group will\n            rebalance in order to reassign the partitions to another consumer\n            group member. If API methods block waiting for messages, that time\n            does not count against this timeout. See `KIP-62`_ for more\n            information. Default 300000\n        rebalance_timeout_ms (int): The maximum time server will wait for this\n            consumer to rejoin the group in a case of rebalance. 
In Java client\n            this behaviour is bound to `max.poll.interval.ms` configuration,\n            but as ``aiokafka`` will rejoin the group in the background, we\n            decouple this setting to allow finer tuning by users that use\n            :class:`.ConsumerRebalanceListener` to delay rebalancing. Defaults\n            to ``session_timeout_ms``\n        session_timeout_ms (int): Client group session and failure detection\n            timeout. The consumer sends periodic heartbeats\n            (`heartbeat.interval.ms`) to indicate its liveness to the broker.\n            If no heartbeats are received by the broker for a group member within\n            the session timeout, the broker will remove the consumer from the\n            group and trigger a rebalance. The allowed range is configured with\n            the **broker** configuration properties\n            `group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\n            Default: 10000\n        heartbeat_interval_ms (int): The expected time in milliseconds\n            between heartbeats to the consumer coordinator when using\n            Kafka's group management feature. Heartbeats are used to ensure\n            that the consumer's session stays active and to facilitate\n            rebalancing when new consumers join or leave the group. The\n            value must be set lower than `session_timeout_ms`, but typically\n            should be set no higher than 1/3 of that value. It can be\n            adjusted even lower to control the expected time for normal\n            rebalances. Default: 3000\n        consumer_timeout_ms (int): maximum wait timeout for background fetching\n            routine. Mostly defines how fast the system will see rebalance and\n            request new data for new partitions. 
Default: 200\n        api_version (str): specify which kafka API version to use.\n            :class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\n            If set to ``auto``, will attempt to infer the broker version by\n            probing various APIs. Default: ``auto``\n        security_protocol (str): Protocol used to communicate with brokers.\n            Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\n        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\n            for wrapping socket connections. Directly passed into asyncio's\n            :meth:`~asyncio.loop.create_connection`. For more information see\n            :ref:`ssl_auth`. Default: None.\n        exclude_internal_topics (bool): Whether records from internal topics\n            (such as offsets) should be exposed to the consumer. If set to True\n            the only way to receive records from an internal topic is\n            subscribing to it. Requires 0.10+ Default: True\n        connections_max_idle_ms (int): Close idle connections after the number\n            of milliseconds specified by this config. Specifying `None` will\n            disable idle checks. Default: 540000 (9 minutes).\n        isolation_level (str): Controls how to read messages written\n            transactionally.\n\n            If set to ``read_committed``, :meth:`.getmany` will only return\n            transactional messages which have been committed.\n            If set to ``read_uncommitted`` (the default), :meth:`.getmany` will\n            return all messages, even transactional messages which have been\n            aborted.\n\n            Non-transactional messages will be returned unconditionally in\n            either mode.\n\n            Messages will always be returned in offset order. 
Hence, in\n            `read_committed` mode, :meth:`.getmany` will only return\n            messages up to the last stable offset (LSO), which is the one less\n            than the offset of the first open transaction. In particular any\n            messages appearing after messages belonging to ongoing transactions\n            will be withheld until the relevant transaction has been completed.\n            As a result, `read_committed` consumers will not be able to read up\n            to the high watermark when there are in flight transactions.\n            Further, when in `read_committed` the seek_to_end method will\n            return the LSO. See method docs below. Default: ``read_uncommitted``\n        sasl_mechanism (str): Authentication mechanism when security_protocol\n            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n            ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n            ``OAUTHBEARER``.\n            Default: ``PLAIN``\n        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\n            Default: None\n        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\n            Default: None\n        sasl_oauth_token_provider (~aiokafka.abc.AbstractTokenProvider): OAuthBearer token provider instance. 
(See :mod:`kafka.oauth.abstract`).\n            Default: None\n    \"\"\"\n    consumer = AIOKafkaConsumer(topic, **kwargs)\n    await consumer.start()\n    try:\n        with tqdm(total=msgs_count, desc=f\"consuming from '{topic}'\") as pbar:\n            async for msg in consumer:\n                pbar.update(1)\n                if pbar.n >= pbar.total:\n                    break\n    finally:\n        await consumer.stop()\n\n# %% ../nbs/999_Helpers.ipynb 20\n@delegates(AIOKafkaConsumer)\n@delegates(AIOKafkaProducer, keep=True)\nasync def produce_and_consume_messages(\n    *,\n    produce_topic: str,\n    consume_topic: str,\n    msgs: List[Any],\n    msgs_count: int,\n    **kwargs: Dict[str, Any],\n) -> None:\n    \"\"\"produce_and_consume_messages\n\n    Args:\n        produce_topic: Topic name for producing messages\n        consume_topic: Topic name for consuming messages\n        msgs: a list of messages to produce\n        msgs_count: number of messages to consume before returning\n        bootstrap_servers (str, list(str)): a ``host[:port]`` string (or list of\n            ``host[:port]`` strings) that the consumer should contact to bootstrap\n            initial cluster metadata.\n\n            This does not have to be the full node list.\n            It just needs to have at least one broker that will respond to a\n            Metadata API Request. Default port is 9092. If no servers are\n            specified, will default to ``localhost:9092``.\n        client_id (str): a name for this client. This string is passed in\n            each request to servers and can be used to identify specific\n            server-side log entries that correspond to this client. Also\n            submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\n            for logging with respect to consumer group administration. 
Default:\n            ``aiokafka-{version}``\n        group_id (str or None): name of the consumer group to join for dynamic\n            partition assignment (if enabled), and to use for fetching and\n            committing offsets. If None, auto-partition assignment (via\n            group coordinator) and offset commits are disabled.\n            Default: None\n        key_deserializer (Callable): Any callable that takes a\n            raw message key and returns a deserialized key.\n        value_deserializer (Callable, Optional): Any callable that takes a\n            raw message value and returns a deserialized value.\n        fetch_min_bytes (int): Minimum amount of data the server should\n            return for a fetch request, otherwise wait up to\n            `fetch_max_wait_ms` for more data to accumulate. Default: 1.\n        fetch_max_bytes (int): The maximum amount of data the server should\n            return for a fetch request. This is not an absolute maximum, if\n            the first message in the first non-empty partition of the fetch\n            is larger than this value, the message will still be returned\n            to ensure that the consumer can make progress. NOTE: consumer\n            performs fetches to multiple brokers in parallel so memory\n            usage will depend on the number of brokers containing\n            partitions for the topic.\n            Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\n        fetch_max_wait_ms (int): The maximum amount of time in milliseconds\n            the server will block before answering the fetch request if\n            there isn't sufficient data to immediately satisfy the\n            requirement given by fetch_min_bytes. Default: 500.\n        max_partition_fetch_bytes (int): The maximum amount of data\n            per-partition the server will return. 
The maximum total memory\n            used for a request ``= #partitions * max_partition_fetch_bytes``.\n            This size must be at least as large as the maximum message size\n            the server allows or else it is possible for the producer to\n            send messages larger than the consumer can fetch. If that\n            happens, the consumer can get stuck trying to fetch a large\n            message on a certain partition. Default: 1048576.\n        max_poll_records (int): The maximum number of records returned in a\n            single call to :meth:`.getmany`. Defaults ``None``, no limit.\n        request_timeout_ms (int): Client request timeout in milliseconds.\n            Default: 40000.\n        retry_backoff_ms (int): Milliseconds to backoff when retrying on\n            errors. Default: 100.\n        auto_offset_reset (str): A policy for resetting offsets on\n            :exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\n            available message, ``latest`` will move to the most recent, and\n            ``none`` will raise an exception so you can handle this case.\n            Default: ``latest``.\n        enable_auto_commit (bool): If true the consumer's offset will be\n            periodically committed in the background. Default: True.\n        auto_commit_interval_ms (int): milliseconds between automatic\n            offset commits, if enable_auto_commit is True. Default: 5000.\n        check_crcs (bool): Automatically check the CRC32 of the records\n            consumed. This ensures no on-the-wire or on-disk corruption to\n            the messages occurred. This check adds some overhead, so it may\n            be disabled in cases seeking extreme performance. 
Default: True\n        metadata_max_age_ms (int): The period of time in milliseconds after\n            which we force a refresh of metadata even if we haven't seen any\n            partition leadership changes to proactively discover any new\n            brokers or partitions. Default: 300000\n        partition_assignment_strategy (list): List of objects to use to\n            distribute partition ownership amongst consumer instances when\n            group management is used. This preference is implicit in the order\n            of the strategies in the list. When assignment strategy changes:\n            to support a change to the assignment strategy, new versions must\n            enable support both for the old assignment strategy and the new\n            one. The coordinator will choose the old assignment strategy until\n            all members have been updated. Then it will choose the new\n            strategy. Default: [:class:`.RoundRobinPartitionAssignor`]\n        max_poll_interval_ms (int): Maximum allowed time between calls to\n            consume messages (e.g., :meth:`.getmany`). If this interval\n            is exceeded the consumer is considered failed and the group will\n            rebalance in order to reassign the partitions to another consumer\n            group member. If API methods block waiting for messages, that time\n            does not count against this timeout. See `KIP-62`_ for more\n            information. Default 300000\n        rebalance_timeout_ms (int): The maximum time server will wait for this\n            consumer to rejoin the group in a case of rebalance. In Java client\n            this behaviour is bound to `max.poll.interval.ms` configuration,\n            but as ``aiokafka`` will rejoin the group in the background, we\n            decouple this setting to allow finer tuning by users that use\n            :class:`.ConsumerRebalanceListener` to delay rebalancing. 
Defaults\n            to ``session_timeout_ms``\n        session_timeout_ms (int): Client group session and failure detection\n            timeout. The consumer sends periodic heartbeats\n            (`heartbeat.interval.ms`) to indicate its liveness to the broker.\n            If no heartbeats are received by the broker for a group member within\n            the session timeout, the broker will remove the consumer from the\n            group and trigger a rebalance. The allowed range is configured with\n            the **broker** configuration properties\n            `group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\n            Default: 10000\n        heartbeat_interval_ms (int): The expected time in milliseconds\n            between heartbeats to the consumer coordinator when using\n            Kafka's group management feature. Heartbeats are used to ensure\n            that the consumer's session stays active and to facilitate\n            rebalancing when new consumers join or leave the group. The\n            value must be set lower than `session_timeout_ms`, but typically\n            should be set no higher than 1/3 of that value. It can be\n            adjusted even lower to control the expected time for normal\n            rebalances. Default: 3000\n        consumer_timeout_ms (int): maximum wait timeout for background fetching\n            routine. Mostly defines how fast the system will see rebalance and\n            request new data for new partitions. Default: 200\n        api_version (str): specify which kafka API version to use.\n            :class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\n            If set to ``auto``, will attempt to infer the broker version by\n            probing various APIs. Default: ``auto``\n        security_protocol (str): Protocol used to communicate with brokers.\n            Valid values are: ``PLAINTEXT``, ``SSL``. 
Default: ``PLAINTEXT``.\n        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\n            for wrapping socket connections. Directly passed into asyncio's\n            :meth:`~asyncio.loop.create_connection`. For more information see\n            :ref:`ssl_auth`. Default: None.\n        exclude_internal_topics (bool): Whether records from internal topics\n            (such as offsets) should be exposed to the consumer. If set to True\n            the only way to receive records from an internal topic is\n            subscribing to it. Requires 0.10+ Default: True\n        connections_max_idle_ms (int): Close idle connections after the number\n            of milliseconds specified by this config. Specifying `None` will\n            disable idle checks. Default: 540000 (9 minutes).\n        isolation_level (str): Controls how to read messages written\n            transactionally.\n\n            If set to ``read_committed``, :meth:`.getmany` will only return\n            transactional messages which have been committed.\n            If set to ``read_uncommitted`` (the default), :meth:`.getmany` will\n            return all messages, even transactional messages which have been\n            aborted.\n\n            Non-transactional messages will be returned unconditionally in\n            either mode.\n\n            Messages will always be returned in offset order. Hence, in\n            `read_committed` mode, :meth:`.getmany` will only return\n            messages up to the last stable offset (LSO), which is the one less\n            than the offset of the first open transaction. 
In particular any\n            messages appearing after messages belonging to ongoing transactions\n            will be withheld until the relevant transaction has been completed.\n            As a result, `read_committed` consumers will not be able to read up\n            to the high watermark when there are in flight transactions.\n            Further, when in `read_committed` the seek_to_end method will\n            return the LSO. See method docs below. Default: ``read_uncommitted``\n        sasl_mechanism (str): Authentication mechanism when security_protocol\n            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\n            ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\n            ``OAUTHBEARER``.\n            Default: ``PLAIN``\n        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\n            Default: None\n        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\n            Default: None\n        sasl_oauth_token_provider (~aiokafka.abc.AbstractTokenProvider): OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\n            Default: None\n        key_serializer (Callable): used to convert user-supplied keys to bytes\n            If not :data:`None`, called as ``f(key),`` should return\n            :class:`bytes`.\n            Default: :data:`None`.\n        value_serializer (Callable): used to convert user-supplied message\n            values to :class:`bytes`. If not :data:`None`, called as\n            ``f(value)``, should return :class:`bytes`.\n            Default: :data:`None`.\n        acks (Any): one of ``0``, ``1``, ``all``. The number of acknowledgments\n            the producer requires the leader to have received before considering a\n            request complete. This controls the durability of records that are\n            sent. 
The following settings are common:\n\n            * ``0``: Producer will not wait for any acknowledgment from the server\n              at all. The message will immediately be added to the socket\n              buffer and considered sent. No guarantee can be made that the\n              server has received the record in this case, and the retries\n              configuration will not take effect (as the client won't\n              generally know of any failures). The offset given back for each\n              record will always be set to -1.\n            * ``1``: The broker leader will write the record to its local log but\n              will respond without awaiting full acknowledgement from all\n              followers. In this case should the leader fail immediately\n              after acknowledging the record but before the followers have\n              replicated it then the record will be lost.\n            * ``all``: The broker leader will wait for the full set of in-sync\n              replicas to acknowledge the record. This guarantees that the\n              record will not be lost as long as at least one in-sync replica\n              remains alive. This is the strongest available guarantee.\n\n            If unset, defaults to ``acks=1``. If `enable_idempotence` is\n            :data:`True` defaults to ``acks=all``\n        compression_type (str): The compression type for all data generated by\n            the producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\n            or :data:`None`.\n            Compression is of full batches of data, so the efficacy of batching\n            will also impact the compression ratio (more batching means better\n            compression). 
Default: :data:`None`.\n        max_batch_size (int): Maximum size of buffered data per partition.\n            After this amount :meth:`send` coroutine will block until batch is\n            drained.\n            Default: 16384\n        linger_ms (int): The producer groups together any records that arrive\n            in between request transmissions into a single batched request.\n            Normally this occurs only under load when records arrive faster\n            than they can be sent out. However in some circumstances the client\n            may want to reduce the number of requests even under moderate load.\n            This setting accomplishes this by adding a small amount of\n            artificial delay; that is, if first request is processed faster,\n            than `linger_ms`, producer will wait ``linger_ms - process_time``.\n            Default: 0 (i.e. no delay).\n        partitioner (Callable): Callable used to determine which partition\n            each message is assigned to. Called (after key serialization):\n            ``partitioner(key_bytes, all_partitions, available_partitions)``.\n            The default partitioner implementation hashes each non-None key\n            using the same murmur2 algorithm as the Java client so that\n            messages with the same key are assigned to the same partition.\n            When a key is :data:`None`, the message is delivered to a random partition\n            (filtered to partitions with available leaders only, if possible).\n        max_request_size (int): The maximum size of a request. This is also\n            effectively a cap on the maximum record size. 
Note that the server\n            has its own cap on record size which may be different from this.\n            This setting will limit the number of record batches the producer\n            will send in a single request to avoid sending huge requests.\n            Default: 1048576.\n        enable_idempotence (bool): When set to :data:`True`, the producer will\n            ensure that exactly one copy of each message is written in the\n            stream. If :data:`False`, producer retries due to broker failures,\n            etc., may write duplicates of the retried message in the stream.\n            Note that enabling idempotence requires ``acks`` to be set to ``all``. If it is not\n            explicitly set by the user it will be chosen. If incompatible\n            values are set, a :exc:`ValueError` will be thrown.\n            New in version 0.5.0.\n        sasl_oauth_token_provider (:class:`~aiokafka.abc.AbstractTokenProvider`):\n            OAuthBearer token provider instance. (See\n            :mod:`kafka.oauth.abstract`).\n            Default: :data:`None`\n        *topics (list(str)): optional list of topics to subscribe to. 
If not set,\n            call :meth:`.subscribe` or :meth:`.assign` before consuming records.\n            Passing topics directly is same as calling :meth:`.subscribe` API.\n    \"\"\"\n    async with anyio.create_task_group() as tg:\n        tg.start_soon(\n            lambda d: produce_messages(**d),\n            dict(msgs=msgs, topic=produce_topic, **kwargs),\n        )\n        tg.start_soon(\n            lambda d: consumes_messages(**d),\n            dict(\n                msgs_count=msgs_count,\n                topic=consume_topic,\n                **kwargs,\n            ),\n        )\n\n# %% ../nbs/999_Helpers.ipynb 23\ndef get_collapsible_admonition(\n    code_block: str, *, name: Optional[str] = None\n) -> Markdown:\n    \"\"\"\n    Generate a collapsible admonition containing a code block as an example.\n\n    Args:\n        code_block: The code block to be included in the example.\n        name: Optional name or title for the example.\n            Default is None.\n\n    Returns:\n        A Markdown object representing the collapsible admonition\n        with the provided code block.\n    \"\"\"\n    alt_name = \"\" if name is None else name\n    intro = f'This example contains the content of the file \"{alt_name}\":'\n    return Markdown(\n        f\"??? 
Example \\n\\n    {intro}\\n\\n\"\n        + textwrap.indent(f\"```python\\n{code_block}\\n```\", prefix=\"    \")\n    )\n\n# %% ../nbs/999_Helpers.ipynb 25\ndef source2markdown(o: Union[str, Callable[..., Any]]) -> Markdown:\n    \"\"\"Converts source code into Markdown for displaying it with Jupyter notebook\n\n    Args:\n        o: source code\n    \"\"\"\n    s = inspect.getsource(o) if callable(o) else o\n    return Markdown(\n        f\"\"\"\n```python\n{s}\n```\n\"\"\"\n    )\n\n# %% ../nbs/999_Helpers.ipynb 27\nasync def wait_for_get_url(\n    url: str, timeout: Optional[int] = None, **kwargs: Dict[str, Any]\n) -> aiohttp.ClientResponse:\n    \"\"\"\n    Asynchronously wait for a GET request to a specified URL with an optional timeout.\n\n    Args:\n        url: The URL to send the GET request to.\n        timeout: Optional maximum number of seconds to wait\n            for a response. If not provided, there is no timeout. Default is None.\n        **kwargs: Additional keyword arguments to be passed to the tqdm progress bar,\n            if a timeout is provided.\n\n    Returns:\n        The aiohttp.ClientResponse response object for the GET request.\n\n    Raises:\n        TimeoutError: If the timeout is reached and the URL couldn't be fetched within\n            the specified time.\n    \"\"\"\n    t0 = datetime.now()\n    if timeout is not None:\n        pbar = tqdm(total=timeout, **kwargs)\n    try:\n        async with aiohttp.ClientSession() as session:\n            while True:\n                try:\n                    async with session.get(url) as response:\n                        if timeout is not None:\n                            pbar.update(pbar.total - pbar.n)\n                        return response\n                except aiohttp.ClientConnectorError as e:\n                    if timeout is not None:\n                        if pbar.total - pbar.n > 1:\n                            pbar.update(1)\n                    await 
asyncio.sleep(1)\n\n                if timeout is not None and datetime.now() - t0 >= timedelta(\n                    seconds=timeout\n                ):\n                    raise TimeoutError(\n                        f\"Could not fetch url '{url}' for more than {timeout} seconds\"\n                    )\n    finally:\n        if timeout is not None:\n            pbar.close()\n"
  },
  {
    "path": "fastkafka/_modidx.py",
    "content": "# Autogenerated by nbdev\n\nd = { 'settings': { 'branch': 'main',\n                'doc_baseurl': '/fastkafka',\n                'doc_host': 'https://airtai.github.io',\n                'git_url': 'https://github.com/airtai/fastkafka',\n                'lib_path': 'fastkafka'},\n  'syms': { 'fastkafka._application.app': { 'fastkafka._application.app.AwaitedMock': ( 'fastkafka.html#awaitedmock',\n                                                                                        'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.AwaitedMock.__init__': ( 'fastkafka.html#awaitedmock.__init__',\n                                                                                                 'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.AwaitedMock._await_for': ( 'fastkafka.html#awaitedmock._await_for',\n                                                                                                   'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka': ( 'fastkafka.html#fastkafka',\n                                                                                      'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.__aenter__': ( 'fastkafka.html#fastkafka.__aenter__',\n                                                                                                 'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.__aexit__': ( 'fastkafka.html#fastkafka.__aexit__',\n                                                                                                'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.__init__': ( 'fastkafka.html#fastkafka.__init__',\n              
                                                                                 'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._populate_bg_tasks': ( 'fastkafka.html#fastkafka._populate_bg_tasks',\n                                                                                                         'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._populate_consumers': ( 'fastkafka.html#fastkafka._populate_consumers',\n                                                                                                          'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._populate_producers': ( 'fastkafka.html#fastkafka._populate_producers',\n                                                                                                          'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._shutdown_bg_tasks': ( 'fastkafka.html#fastkafka._shutdown_bg_tasks',\n                                                                                                         'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._shutdown_consumers': ( 'fastkafka.html#fastkafka._shutdown_consumers',\n                                                                                                          'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._shutdown_producers': ( 'fastkafka.html#fastkafka._shutdown_producers',\n                                                                                                          'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._start': ( 
'fastkafka.html#fastkafka._start',\n                                                                                             'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka._stop': ( 'fastkafka.html#fastkafka._stop',\n                                                                                            'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.benchmark': ( 'fastkafka.html#fastkafka.benchmark',\n                                                                                                'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.consumes': ( 'fastkafka.html#fastkafka.consumes',\n                                                                                               'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.create_docs': ( 'fastkafka.html#fastkafka.create_docs',\n                                                                                                  'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.create_mocks': ( 'fastkafka.html#fastkafka.create_mocks',\n                                                                                                   'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.fastapi_lifespan': ( 'fastkafka.html#fastkafka.fastapi_lifespan',\n                                                                                                       'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.get_topics': ( 'fastkafka.html#fastkafka.get_topics',\n                                                                                  
               'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.is_started': ( 'fastkafka.html#fastkafka.is_started',\n                                                                                                 'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.produces': ( 'fastkafka.html#fastkafka.produces',\n                                                                                               'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.run_in_background': ( 'fastkafka.html#fastkafka.run_in_background',\n                                                                                                        'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app.FastKafka.set_kafka_broker': ( 'fastkafka.html#fastkafka.set_kafka_broker',\n                                                                                                       'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._create_producer': ( 'fastkafka.html#_create_producer',\n                                                                                             'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._get_broker_addr_list': ( 'fastkafka.html#_get_broker_addr_list',\n                                                                                                  'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._get_contact_info': ( 'fastkafka.html#_get_contact_info',\n                                                                                              'fastkafka/_application/app.py'),\n                                            
'fastkafka._application.app._get_decoder_fn': ( 'fastkafka.html#_get_decoder_fn',\n                                                                                            'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._get_encoder_fn': ( 'fastkafka.html#_get_encoder_fn',\n                                                                                            'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._get_kafka_brokers': ( 'fastkafka.html#_get_kafka_brokers',\n                                                                                               'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._get_kafka_config': ( 'fastkafka.html#_get_kafka_config',\n                                                                                              'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._get_topic_name': ( 'fastkafka.html#_get_topic_name',\n                                                                                            'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._prepare_and_check_brokers': ( 'fastkafka.html#_prepare_and_check_brokers',\n                                                                                                       'fastkafka/_application/app.py'),\n                                            'fastkafka._application.app._resolve_key': ( 'fastkafka.html#_resolve_key',\n                                                                                         'fastkafka/_application/app.py')},\n            'fastkafka._application.tester': { 'fastkafka._application.tester.AmbiguousWarning': ( 'tester.html#ambiguouswarning',\n                                                                                              
     'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.AmbiguousWarning.__call__': ( 'tester.html#ambiguouswarning.__call__',\n                                                                                                            'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.AmbiguousWarning.__getattribute__': ( 'tester.html#ambiguouswarning.__getattribute__',\n                                                                                                                    'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.AmbiguousWarning.__init__': ( 'tester.html#ambiguouswarning.__init__',\n                                                                                                            'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester': ( 'tester.html#tester',\n                                                                                         'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester.__aenter__': ( 'tester.html#tester.__aenter__',\n                                                                                                    'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester.__aexit__': ( 'tester.html#tester.__aexit__',\n                                                                                                   'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester.__init__': ( 'tester.html#tester.__init__',\n                                                                                                  
'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester._arrange_mirrors': ( 'tester.html#tester._arrange_mirrors',\n                                                                                                          'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester._create_ctx': ( 'tester.html#tester._create_ctx',\n                                                                                                     'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester._create_mirrors': ( 'tester.html#tester._create_mirrors',\n                                                                                                         'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester._restore_initial_arguments': ( 'tester.html#tester._restore_initial_arguments',\n                                                                                                                    'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester._set_arguments_and_return_old': ( 'tester.html#tester._set_arguments_and_return_old',\n                                                                                                                       'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester._start_tester': ( 'tester.html#tester._start_tester',\n                                                                                                       'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester._stop_tester': ( 'tester.html#tester._stop_tester',\n                                          
                                                            'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester.using_external_broker': ( 'tester.html#tester.using_external_broker',\n                                                                                                               'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.Tester.using_inmemory_broker': ( 'tester.html#tester.using_inmemory_broker',\n                                                                                                               'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester._get_broker_spec': ( 'tester.html#_get_broker_spec',\n                                                                                                   'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.mirror_consumer': ( 'tester.html#mirror_consumer',\n                                                                                                  'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.mirror_producer': ( 'tester.html#mirror_producer',\n                                                                                                  'fastkafka/_application/tester.py'),\n                                               'fastkafka._application.tester.set_sugar': ( 'tester.html#set_sugar',\n                                                                                            'fastkafka/_application/tester.py')},\n            'fastkafka._components.aiokafka_consumer_loop': { 'fastkafka._components.aiokafka_consumer_loop.EventMetadata': ( 'consumerloop.html#eventmetadata',\n                                                                                
                                              'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop.EventMetadata.create_event_metadata': ( 'consumerloop.html#eventmetadata.create_event_metadata',\n                                                                                                                                                    'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop._aiokafka_consumer_loop': ( 'consumerloop.html#_aiokafka_consumer_loop',\n                                                                                                                                        'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop._callback_parameters_wrapper': ( 'consumerloop.html#_callback_parameters_wrapper',\n                                                                                                                                             'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop._get_batch_msg_handlers': ( 'consumerloop.html#_get_batch_msg_handlers',\n                                                                                                                                        'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop._get_single_msg_handlers': ( 'consumerloop.html#_get_single_msg_handlers',\n                                                                                                                                         
'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop._prepare_callback': ( 'consumerloop.html#_prepare_callback',\n                                                                                                                                  'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop.aiokafka_consumer_loop': ( 'consumerloop.html#aiokafka_consumer_loop',\n                                                                                                                                       'fastkafka/_components/aiokafka_consumer_loop.py'),\n                                                              'fastkafka._components.aiokafka_consumer_loop.sanitize_kafka_config': ( 'consumerloop.html#sanitize_kafka_config',\n                                                                                                                                      'fastkafka/_components/aiokafka_consumer_loop.py')},\n            'fastkafka._components.asyncapi': { 'fastkafka._components.asyncapi.APIKeyLocation': ( 'asyncapi.html#apikeylocation',\n                                                                                                   'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.ContactInfo': ( 'asyncapi.html#contactinfo',\n                                                                                                'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.KafkaBroker': ( 'asyncapi.html#kafkabroker',\n                                                                                                'fastkafka/_components/asyncapi.py'),\n                                                
'fastkafka._components.asyncapi.KafkaBroker.model_dump': ( 'asyncapi.html#kafkabroker.model_dump',\n                                                                                                           'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.KafkaBroker.model_dump_json': ( 'asyncapi.html#kafkabroker.model_dump_json',\n                                                                                                                'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.KafkaBrokers': ( 'asyncapi.html#kafkabrokers',\n                                                                                                 'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.KafkaBrokers.model_dump': ( 'asyncapi.html#kafkabrokers.model_dump',\n                                                                                                            'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.KafkaBrokers.model_dump_json': ( 'asyncapi.html#kafkabrokers.model_dump_json',\n                                                                                                                 'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.KafkaMessage': ( 'asyncapi.html#kafkamessage',\n                                                                                                 'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.KafkaServiceInfo': ( 'asyncapi.html#kafkaserviceinfo',\n                                                                                                     'fastkafka/_components/asyncapi.py'),\n                                
                'fastkafka._components.asyncapi.SecuritySchema': ( 'asyncapi.html#securityschema',\n                                                                                                   'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.SecuritySchema.__init__': ( 'asyncapi.html#securityschema.__init__',\n                                                                                                            'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.SecuritySchema.model_dump': ( 'asyncapi.html#securityschema.model_dump',\n                                                                                                              'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.SecuritySchema.model_dump_json': ( 'asyncapi.html#securityschema.model_dump_json',\n                                                                                                                   'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.SecurityType': ( 'asyncapi.html#securitytype',\n                                                                                                 'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._add_example_to_msg_definitions': ( 'asyncapi.html#_add_example_to_msg_definitions',\n                                                                                                                    'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._generate_async_docs': ( 'asyncapi.html#_generate_async_docs',\n                                                                                                         
'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._generate_async_spec': ( 'asyncapi.html#_generate_async_spec',\n                                                                                                         'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_asyncapi_schema': ( 'asyncapi.html#_get_asyncapi_schema',\n                                                                                                         'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_channels_schema': ( 'asyncapi.html#_get_channels_schema',\n                                                                                                         'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_components_schema': ( 'asyncapi.html#_get_components_schema',\n                                                                                                           'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_example': ( 'asyncapi.html#_get_example',\n                                                                                                 'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_kafka_msg_classes': ( 'asyncapi.html#_get_kafka_msg_classes',\n                                                                                                           'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_kafka_msg_definitions': ( 'asyncapi.html#_get_kafka_msg_definitions',\n                                                                                         
                      'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_msg_cls_for_consumer': ( 'asyncapi.html#_get_msg_cls_for_consumer',\n                                                                                                              'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_msg_cls_for_producer': ( 'asyncapi.html#_get_msg_cls_for_producer',\n                                                                                                              'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_msg_definitions_with_examples': ( 'asyncapi.html#_get_msg_definitions_with_examples',\n                                                                                                                       'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_security_schemes': ( 'asyncapi.html#_get_security_schemes',\n                                                                                                          'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_servers_schema': ( 'asyncapi.html#_get_servers_schema',\n                                                                                                        'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi._get_topic_dict': ( 'asyncapi.html#_get_topic_dict',\n                                                                                                    'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.export_async_spec': ( 'asyncapi.html#export_async_spec',\n                
                                                                                      'fastkafka/_components/asyncapi.py'),\n                                                'fastkafka._components.asyncapi.yaml_file_cmp': ( 'asyncapi.html#yaml_file_cmp',\n                                                                                                  'fastkafka/_components/asyncapi.py')},\n            'fastkafka._components.benchmarking': { 'fastkafka._components.benchmarking._benchmark': ( 'benchmarking.html#_benchmark',\n                                                                                                       'fastkafka/_components/benchmarking.py')},\n            'fastkafka._components.docs_dependencies': { 'fastkafka._components.docs_dependencies._check_npm': ( 'docs_dependencies.html#_check_npm',\n                                                                                                                 'fastkafka/_components/docs_dependencies.py'),\n                                                         'fastkafka._components.docs_dependencies._check_npm_with_local': ( 'docs_dependencies.html#_check_npm_with_local',\n                                                                                                                            'fastkafka/_components/docs_dependencies.py'),\n                                                         'fastkafka._components.docs_dependencies._install_docs_npm_deps': ( 'docs_dependencies.html#_install_docs_npm_deps',\n                                                                                                                             'fastkafka/_components/docs_dependencies.py'),\n                                                         'fastkafka._components.docs_dependencies._install_node': ( 'docs_dependencies.html#_install_node',\n                                                                                                                    
'fastkafka/_components/docs_dependencies.py')},\n            'fastkafka._components.encoder.avro': { 'fastkafka._components.encoder.avro.AvroBase': ( 'avro_encode_decoder.html#avrobase',\n                                                                                                     'fastkafka/_components/encoder/avro.py'),\n                                                    'fastkafka._components.encoder.avro.AvroBase._avro_schema': ( 'avro_encode_decoder.html#avrobase._avro_schema',\n                                                                                                                  'fastkafka/_components/encoder/avro.py'),\n                                                    'fastkafka._components.encoder.avro.AvroBase.avro_schema': ( 'avro_encode_decoder.html#avrobase.avro_schema',\n                                                                                                                 'fastkafka/_components/encoder/avro.py'),\n                                                    'fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_class': ( 'avro_encode_decoder.html#avrobase.avro_schema_for_pydantic_class',\n                                                                                                                                    'fastkafka/_components/encoder/avro.py'),\n                                                    'fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_object': ( 'avro_encode_decoder.html#avrobase.avro_schema_for_pydantic_object',\n                                                                                                                                     'fastkafka/_components/encoder/avro.py'),\n                                                    'fastkafka._components.encoder.avro.avro_decoder': ( 'avro_encode_decoder.html#avro_decoder',\n                                                                                                         
'fastkafka/_components/encoder/avro.py'),\n                                                    'fastkafka._components.encoder.avro.avro_encoder': ( 'avro_encode_decoder.html#avro_encoder',\n                                                                                                         'fastkafka/_components/encoder/avro.py'),\n                                                    'fastkafka._components.encoder.avro.avsc_to_pydantic': ( 'avro_encode_decoder.html#avsc_to_pydantic',\n                                                                                                             'fastkafka/_components/encoder/avro.py')},\n            'fastkafka._components.encoder.json': { 'fastkafka._components.encoder.json._to_json_utf8': ( 'json_encode_decoder.html#_to_json_utf8',\n                                                                                                          'fastkafka/_components/encoder/json.py'),\n                                                    'fastkafka._components.encoder.json.json_decoder': ( 'json_encode_decoder.html#json_decoder',\n                                                                                                         'fastkafka/_components/encoder/json.py'),\n                                                    'fastkafka._components.encoder.json.json_encoder': ( 'json_encode_decoder.html#json_encoder',\n                                                                                                         'fastkafka/_components/encoder/json.py')},\n            'fastkafka._components.helpers': { 'fastkafka._components.helpers.ImportFromStringError': ( 'internal_helpers.html#importfromstringerror',\n                                                                                                        'fastkafka/_components/helpers.py'),\n                                               'fastkafka._components.helpers._import_from_string': ( 'internal_helpers.html#_import_from_string',\n                       
                                                                               'fastkafka/_components/helpers.py'),\n                                               'fastkafka._components.helpers.change_dir': ( 'internal_helpers.html#change_dir',\n                                                                                             'fastkafka/_components/helpers.py'),\n                                               'fastkafka._components.helpers.in_notebook': ( 'internal_helpers.html#in_notebook',\n                                                                                              'fastkafka/_components/helpers.py'),\n                                               'fastkafka._components.helpers.remove_suffix': ( 'internal_helpers.html#remove_suffix',\n                                                                                                'fastkafka/_components/helpers.py'),\n                                               'fastkafka._components.helpers.true_after': ( 'internal_helpers.html#true_after',\n                                                                                             'fastkafka/_components/helpers.py'),\n                                               'fastkafka._components.helpers.unwrap_list_type': ( 'internal_helpers.html#unwrap_list_type',\n                                                                                                   'fastkafka/_components/helpers.py')},\n            'fastkafka._components.logger': { 'fastkafka._components.logger.cached_log': ( 'logger.html#cached_log',\n                                                                                           'fastkafka/_components/logger.py'),\n                                              'fastkafka._components.logger.get_default_logger_configuration': ( 'logger.html#get_default_logger_configuration',\n                                                                                                                 
'fastkafka/_components/logger.py'),\n                                              'fastkafka._components.logger.get_logger': ( 'logger.html#get_logger',\n                                                                                           'fastkafka/_components/logger.py'),\n                                              'fastkafka._components.logger.set_level': ( 'logger.html#set_level',\n                                                                                          'fastkafka/_components/logger.py'),\n                                              'fastkafka._components.logger.suppress_timestamps': ( 'logger.html#suppress_timestamps',\n                                                                                                    'fastkafka/_components/logger.py')},\n            'fastkafka._components.meta': { 'fastkafka._components.meta._delegates_without_docs': ( 'meta.html#_delegates_without_docs',\n                                                                                                    'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta._format_args': ( 'meta.html#_format_args',\n                                                                                         'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta._get_default_kwargs_from_sig': ( 'meta.html#_get_default_kwargs_from_sig',\n                                                                                                         'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.classcontextmanager': ( 'meta.html#classcontextmanager',\n                                                                                                'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.combine_params': ( 'meta.html#combine_params',\n                  
                                                                         'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.copy_func': ( 'meta.html#copy_func',\n                                                                                      'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.delegates': ( 'meta.html#delegates',\n                                                                                      'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.eval_type': ( 'meta.html#eval_type',\n                                                                                      'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.export': ('meta.html#export', 'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.filter_using_signature': ( 'meta.html#filter_using_signature',\n                                                                                                   'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.get_annotations_ex': ( 'meta.html#get_annotations_ex',\n                                                                                               'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.patch': ('meta.html#patch', 'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.patch_to': ('meta.html#patch_to', 'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.test_eq': ('meta.html#test_eq', 'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.union2tuple': ( 'meta.html#union2tuple',\n     
                                                                                   'fastkafka/_components/meta.py'),\n                                            'fastkafka._components.meta.use_parameters_of': ( 'meta.html#use_parameters_of',\n                                                                                              'fastkafka/_components/meta.py')},\n            'fastkafka._components.producer_decorator': { 'fastkafka._components.producer_decorator.KafkaEvent': ( 'producerdecorator.html#kafkaevent',\n                                                                                                                   'fastkafka/_components/producer_decorator.py'),\n                                                          'fastkafka._components.producer_decorator._wrap_in_event': ( 'producerdecorator.html#_wrap_in_event',\n                                                                                                                       'fastkafka/_components/producer_decorator.py'),\n                                                          'fastkafka._components.producer_decorator.produce_batch': ( 'producerdecorator.html#produce_batch',\n                                                                                                                      'fastkafka/_components/producer_decorator.py'),\n                                                          'fastkafka._components.producer_decorator.produce_single': ( 'producerdecorator.html#produce_single',\n                                                                                                                       'fastkafka/_components/producer_decorator.py'),\n                                                          'fastkafka._components.producer_decorator.producer_decorator': ( 'producerdecorator.html#producer_decorator',\n                                                                                                                           
'fastkafka/_components/producer_decorator.py'),\n                                                          'fastkafka._components.producer_decorator.release_callback': ( 'producerdecorator.html#release_callback',\n                                                                                                                         'fastkafka/_components/producer_decorator.py'),\n                                                          'fastkafka._components.producer_decorator.send_batch': ( 'producerdecorator.html#send_batch',\n                                                                                                                   'fastkafka/_components/producer_decorator.py'),\n                                                          'fastkafka._components.producer_decorator.unwrap_from_kafka_event': ( 'producerdecorator.html#unwrap_from_kafka_event',\n                                                                                                                                'fastkafka/_components/producer_decorator.py')},\n            'fastkafka._components.task_streaming': { 'fastkafka._components.task_streaming.DynamicTaskExecutor': ( 'taskstreaming.html#dynamictaskexecutor',\n                                                                                                                    'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.DynamicTaskExecutor.__init__': ( 'taskstreaming.html#dynamictaskexecutor.__init__',\n                                                                                                                             'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.DynamicTaskExecutor.run': ( 'taskstreaming.html#dynamictaskexecutor.run',\n                                                                                                            
            'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.ExceptionMonitor': ( 'taskstreaming.html#exceptionmonitor',\n                                                                                                                 'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.ExceptionMonitor.__aenter__': ( 'taskstreaming.html#exceptionmonitor.__aenter__',\n                                                                                                                            'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.ExceptionMonitor.__aexit__': ( 'taskstreaming.html#exceptionmonitor.__aexit__',\n                                                                                                                           'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.ExceptionMonitor.__init__': ( 'taskstreaming.html#exceptionmonitor.__init__',\n                                                                                                                          'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.ExceptionMonitor._monitor_step': ( 'taskstreaming.html#exceptionmonitor._monitor_step',\n                                                                                                                               'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.ExceptionMonitor.on_error': ( 'taskstreaming.html#exceptionmonitor.on_error',\n                                                                                     
                                     'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.SequentialExecutor': ( 'taskstreaming.html#sequentialexecutor',\n                                                                                                                   'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.SequentialExecutor.__init__': ( 'taskstreaming.html#sequentialexecutor.__init__',\n                                                                                                                            'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.SequentialExecutor.run': ( 'taskstreaming.html#sequentialexecutor.run',\n                                                                                                                       'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.StreamExecutor': ( 'taskstreaming.html#streamexecutor',\n                                                                                                               'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.StreamExecutor.run': ( 'taskstreaming.html#streamexecutor.run',\n                                                                                                                   'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.TaskPool': ( 'taskstreaming.html#taskpool',\n                                                                                                         'fastkafka/_components/task_streaming.py'),\n                   
                                   'fastkafka._components.task_streaming.TaskPool.__aenter__': ( 'taskstreaming.html#taskpool.__aenter__',\n                                                                                                                    'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.TaskPool.__aexit__': ( 'taskstreaming.html#taskpool.__aexit__',\n                                                                                                                   'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.TaskPool.__init__': ( 'taskstreaming.html#taskpool.__init__',\n                                                                                                                  'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.TaskPool.__len__': ( 'taskstreaming.html#taskpool.__len__',\n                                                                                                                 'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.TaskPool.add': ( 'taskstreaming.html#taskpool.add',\n                                                                                                             'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.TaskPool.discard': ( 'taskstreaming.html#taskpool.discard',\n                                                                                                                 'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.TaskPool.log_error': ( 
'taskstreaming.html#taskpool.log_error',\n                                                                                                                   'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming._process_items_coro': ( 'taskstreaming.html#_process_items_coro',\n                                                                                                                    'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming._process_items_task': ( 'taskstreaming.html#_process_items_task',\n                                                                                                                    'fastkafka/_components/task_streaming.py'),\n                                                      'fastkafka._components.task_streaming.get_executor': ( 'taskstreaming.html#get_executor',\n                                                                                                             'fastkafka/_components/task_streaming.py')},\n            'fastkafka._components.test_dependencies': { 'fastkafka._components.test_dependencies.VersionParser': ( 'test_dependencies.html#versionparser',\n                                                                                                                    'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies.VersionParser.__init__': ( 'test_dependencies.html#versionparser.__init__',\n                                                                                                                             'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies.VersionParser.handle_data': ( 'test_dependencies.html#versionparser.handle_data',\n        
                                                                                                                        'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies._install_java': ( 'test_dependencies.html#_install_java',\n                                                                                                                    'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies._install_kafka': ( 'test_dependencies.html#_install_kafka',\n                                                                                                                     'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies._install_testing_deps': ( 'test_dependencies.html#_install_testing_deps',\n                                                                                                                            'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies.check_java': ( 'test_dependencies.html#check_java',\n                                                                                                                 'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies.check_kafka': ( 'test_dependencies.html#check_kafka',\n                                                                                                                  'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies.generate_app_in_tmp': ( 'test_dependencies.html#generate_app_in_tmp',\n                                              
                                                                            'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies.generate_app_src': ( 'test_dependencies.html#generate_app_src',\n                                                                                                                       'fastkafka/_components/test_dependencies.py'),\n                                                         'fastkafka._components.test_dependencies.get_kafka_version': ( 'test_dependencies.html#get_kafka_version',\n                                                                                                                        'fastkafka/_components/test_dependencies.py')},\n            'fastkafka._testing.apache_kafka_broker': { 'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker': ( 'apachekafkabroker.html#apachekafkabroker',\n                                                                                                                      'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.__aenter__': ( 'apachekafkabroker.html#apachekafkabroker.__aenter__',\n                                                                                                                                 'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.__aexit__': ( 'apachekafkabroker.html#apachekafkabroker.__aexit__',\n                                                                                                                                'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.__enter__': ( 
'apachekafkabroker.html#apachekafkabroker.__enter__',\n                                                                                                                                'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.__exit__': ( 'apachekafkabroker.html#apachekafkabroker.__exit__',\n                                                                                                                               'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.__init__': ( 'apachekafkabroker.html#apachekafkabroker.__init__',\n                                                                                                                               'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker._check_deps': ( 'apachekafkabroker.html#apachekafkabroker._check_deps',\n                                                                                                                                  'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker._create_topics': ( 'apachekafkabroker.html#apachekafkabroker._create_topics',\n                                                                                                                                     'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker._start': ( 'apachekafkabroker.html#apachekafkabroker._start',\n                                                                                                                             
'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker._start_kafka': ( 'apachekafkabroker.html#apachekafkabroker._start_kafka',\n                                                                                                                                   'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker._start_service': ( 'apachekafkabroker.html#apachekafkabroker._start_service',\n                                                                                                                                     'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker._start_zookeeper': ( 'apachekafkabroker.html#apachekafkabroker._start_zookeeper',\n                                                                                                                                       'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker._stop': ( 'apachekafkabroker.html#apachekafkabroker._stop',\n                                                                                                                            'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.get_service_config_string': ( 'apachekafkabroker.html#apachekafkabroker.get_service_config_string',\n                                                                                                                                                'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        
'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.is_started': ( 'apachekafkabroker.html#apachekafkabroker.is_started',\n                                                                                                                                 'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.start': ( 'apachekafkabroker.html#apachekafkabroker.start',\n                                                                                                                            'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.stop': ( 'apachekafkabroker.html#apachekafkabroker.stop',\n                                                                                                                           'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker._get_unique_local_brokers_to_start': ( 'apachekafkabroker.html#_get_unique_local_brokers_to_start',\n                                                                                                                                       'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker._start_and_stop_brokers': ( 'apachekafkabroker.html#_start_and_stop_brokers',\n                                                                                                                            'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker._start_broker': ( 'apachekafkabroker.html#_start_broker',\n                                                                                                                  
'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker._stop_broker': ( 'apachekafkabroker.html#_stop_broker',\n                                                                                                                 'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.get_free_port': ( 'apachekafkabroker.html#get_free_port',\n                                                                                                                  'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.get_kafka_config_string': ( 'apachekafkabroker.html#get_kafka_config_string',\n                                                                                                                            'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.get_zookeeper_config_string': ( 'apachekafkabroker.html#get_zookeeper_config_string',\n                                                                                                                                'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.is_port_in_use': ( 'apachekafkabroker.html#is_port_in_use',\n                                                                                                                   'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.run_and_match': ( 'apachekafkabroker.html#run_and_match',\n                                                                                                                  
'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.start_apache_kafka_brokers': ( 'apachekafkabroker.html#start_apache_kafka_brokers',\n                                                                                                                               'fastkafka/_testing/apache_kafka_broker.py'),\n                                                        'fastkafka._testing.apache_kafka_broker.write_config_and_run': ( 'apachekafkabroker.html#write_config_and_run',\n                                                                                                                         'fastkafka/_testing/apache_kafka_broker.py')},\n            'fastkafka._testing.in_memory_broker': { 'fastkafka._testing.in_memory_broker.GroupMetadata': ( 'inmemorybroker.html#groupmetadata',\n                                                                                                            'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.GroupMetadata.__init__': ( 'inmemorybroker.html#groupmetadata.__init__',\n                                                                                                                     'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.GroupMetadata.assign_partitions': ( 'inmemorybroker.html#groupmetadata.assign_partitions',\n                                                                                                                              'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.GroupMetadata.get_partitions': ( 'inmemorybroker.html#groupmetadata.get_partitions',\n                                                                                                             
              'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.GroupMetadata.rebalance': ( 'inmemorybroker.html#groupmetadata.rebalance',\n                                                                                                                      'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.GroupMetadata.set_offset': ( 'inmemorybroker.html#groupmetadata.set_offset',\n                                                                                                                       'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.GroupMetadata.subscribe': ( 'inmemorybroker.html#groupmetadata.subscribe',\n                                                                                                                      'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.GroupMetadata.unsubscribe': ( 'inmemorybroker.html#groupmetadata.unsubscribe',\n                                                                                                                        'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker': ( 'inmemorybroker.html#inmemorybroker',\n                                                                                                             'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker.__init__': ( 'inmemorybroker.html#inmemorybroker.__init__',\n                                                                                                                      
'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker._start': ( 'inmemorybroker.html#inmemorybroker._start',\n                                                                                                                    'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker._stop': ( 'inmemorybroker.html#inmemorybroker._stop',\n                                                                                                                   'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker.connect': ( 'inmemorybroker.html#inmemorybroker.connect',\n                                                                                                                     'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker.dissconnect': ( 'inmemorybroker.html#inmemorybroker.dissconnect',\n                                                                                                                         'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker.lifecycle': ( 'inmemorybroker.html#inmemorybroker.lifecycle',\n                                                                                                                       'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker.read': ( 'inmemorybroker.html#inmemorybroker.read',\n                                                                                                                  'fastkafka/_testing/in_memory_broker.py'),\n      
                                               'fastkafka._testing.in_memory_broker.InMemoryBroker.subscribe': ( 'inmemorybroker.html#inmemorybroker.subscribe',\n                                                                                                                       'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker.unsubscribe': ( 'inmemorybroker.html#inmemorybroker.unsubscribe',\n                                                                                                                         'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryBroker.write': ( 'inmemorybroker.html#inmemorybroker.write',\n                                                                                                                   'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryConsumer': ( 'inmemorybroker.html#inmemoryconsumer',\n                                                                                                               'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryConsumer.__call__': ( 'inmemorybroker.html#inmemoryconsumer.__call__',\n                                                                                                                        'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryConsumer.__init__': ( 'inmemorybroker.html#inmemoryconsumer.__init__',\n                                                                                                                        'fastkafka/_testing/in_memory_broker.py'),\n                                            
         'fastkafka._testing.in_memory_broker.InMemoryConsumer.getmany': ( 'inmemorybroker.html#inmemoryconsumer.getmany',\n                                                                                                                       'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryConsumer.start': ( 'inmemorybroker.html#inmemoryconsumer.start',\n                                                                                                                     'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryConsumer.stop': ( 'inmemorybroker.html#inmemoryconsumer.stop',\n                                                                                                                    'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryConsumer.subscribe': ( 'inmemorybroker.html#inmemoryconsumer.subscribe',\n                                                                                                                         'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer': ( 'inmemorybroker.html#inmemoryproducer',\n                                                                                                               'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer.__call__': ( 'inmemorybroker.html#inmemoryproducer.__call__',\n                                                                                                                        'fastkafka/_testing/in_memory_broker.py'),\n                                                     
'fastkafka._testing.in_memory_broker.InMemoryProducer.__init__': ( 'inmemorybroker.html#inmemoryproducer.__init__',\n                                                                                                                        'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer._partition': ( 'inmemorybroker.html#inmemoryproducer._partition',\n                                                                                                                          'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer.create_batch': ( 'inmemorybroker.html#inmemoryproducer.create_batch',\n                                                                                                                            'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer.partitions_for': ( 'inmemorybroker.html#inmemoryproducer.partitions_for',\n                                                                                                                              'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer.send': ( 'inmemorybroker.html#inmemoryproducer.send',\n                                                                                                                    'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer.send_batch': ( 'inmemorybroker.html#inmemoryproducer.send_batch',\n                                                                                                                          'fastkafka/_testing/in_memory_broker.py'),\n                   
                                  'fastkafka._testing.in_memory_broker.InMemoryProducer.start': ( 'inmemorybroker.html#inmemoryproducer.start',\n                                                                                                                     'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.InMemoryProducer.stop': ( 'inmemorybroker.html#inmemoryproducer.stop',\n                                                                                                                    'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaPartition': ( 'inmemorybroker.html#kafkapartition',\n                                                                                                             'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaPartition.__init__': ( 'inmemorybroker.html#kafkapartition.__init__',\n                                                                                                                      'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaPartition.latest_offset': ( 'inmemorybroker.html#kafkapartition.latest_offset',\n                                                                                                                           'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaPartition.read': ( 'inmemorybroker.html#kafkapartition.read',\n                                                                                                                  'fastkafka/_testing/in_memory_broker.py'),\n                                                     
'fastkafka._testing.in_memory_broker.KafkaPartition.write': ( 'inmemorybroker.html#kafkapartition.write',\n                                                                                                                   'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaRecord': ( 'inmemorybroker.html#kafkarecord',\n                                                                                                          'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaTopic': ( 'inmemorybroker.html#kafkatopic',\n                                                                                                         'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaTopic.__init__': ( 'inmemorybroker.html#kafkatopic.__init__',\n                                                                                                                  'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaTopic.latest_offset': ( 'inmemorybroker.html#kafkatopic.latest_offset',\n                                                                                                                       'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaTopic.read': ( 'inmemorybroker.html#kafkatopic.read',\n                                                                                                              'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaTopic.write': ( 'inmemorybroker.html#kafkatopic.write',\n                                                  
                                                             'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaTopic.write_with_key': ( 'inmemorybroker.html#kafkatopic.write_with_key',\n                                                                                                                        'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.KafkaTopic.write_with_partition': ( 'inmemorybroker.html#kafkatopic.write_with_partition',\n                                                                                                                              'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.MockBatch': ( 'inmemorybroker.html#mockbatch',\n                                                                                                        'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.MockBatch.__init__': ( 'inmemorybroker.html#mockbatch.__init__',\n                                                                                                                 'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.MockBatch.append': ( 'inmemorybroker.html#mockbatch.append',\n                                                                                                               'fastkafka/_testing/in_memory_broker.py'),\n                                                     'fastkafka._testing.in_memory_broker.split_list': ( 'inmemorybroker.html#split_list',\n                                                                                                         'fastkafka/_testing/in_memory_broker.py')},\n            
'fastkafka._testing.local_redpanda_broker': { 'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker': ( 'localredpandabroker.html#localredpandabroker',\n                                                                                                                            'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.__aenter__': ( 'localredpandabroker.html#localredpandabroker.__aenter__',\n                                                                                                                                       'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.__aexit__': ( 'localredpandabroker.html#localredpandabroker.__aexit__',\n                                                                                                                                      'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.__enter__': ( 'localredpandabroker.html#localredpandabroker.__enter__',\n                                                                                                                                      'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.__exit__': ( 'localredpandabroker.html#localredpandabroker.__exit__',\n                                                                                                                                     'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.__init__': ( 
'localredpandabroker.html#localredpandabroker.__init__',\n                                                                                                                                     'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker._check_deps': ( 'localredpandabroker.html#localredpandabroker._check_deps',\n                                                                                                                                        'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker._create_topics': ( 'localredpandabroker.html#localredpandabroker._create_topics',\n                                                                                                                                           'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker._start': ( 'localredpandabroker.html#localredpandabroker._start',\n                                                                                                                                   'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker._start_redpanda': ( 'localredpandabroker.html#localredpandabroker._start_redpanda',\n                                                                                                                                            'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker._stop': ( 'localredpandabroker.html#localredpandabroker._stop',\n                                         
                                                                                         'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.get_service_config_string': ( 'localredpandabroker.html#localredpandabroker.get_service_config_string',\n                                                                                                                                                      'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.is_started': ( 'localredpandabroker.html#localredpandabroker.is_started',\n                                                                                                                                       'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.start': ( 'localredpandabroker.html#localredpandabroker.start',\n                                                                                                                                  'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.stop': ( 'localredpandabroker.html#localredpandabroker.stop',\n                                                                                                                                 'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.check_docker': ( 'localredpandabroker.html#check_docker',\n                                                                                                                     'fastkafka/_testing/local_redpanda_broker.py'),\n         
                                                 'fastkafka._testing.local_redpanda_broker.get_redpanda_docker_cmd': ( 'localredpandabroker.html#get_redpanda_docker_cmd',\n                                                                                                                                'fastkafka/_testing/local_redpanda_broker.py'),\n                                                          'fastkafka._testing.local_redpanda_broker.start_redpanda_brokers': ( 'localredpandabroker.html#start_redpanda_brokers',\n                                                                                                                               'fastkafka/_testing/local_redpanda_broker.py')},\n            'fastkafka._testing.test_utils': { 'fastkafka._testing.test_utils.display_docs': ( 'test_utils.html#display_docs',\n                                                                                               'fastkafka/_testing/test_utils.py'),\n                                               'fastkafka._testing.test_utils.mock_AIOKafkaProducer_send': ( 'test_utils.html#mock_aiokafkaproducer_send',\n                                                                                                             'fastkafka/_testing/test_utils.py'),\n                                               'fastkafka._testing.test_utils.nb_safe_seed': ( 'test_utils.html#nb_safe_seed',\n                                                                                               'fastkafka/_testing/test_utils.py'),\n                                               'fastkafka._testing.test_utils.run_script_and_cancel': ( 'test_utils.html#run_script_and_cancel',\n                                                                                                        'fastkafka/_testing/test_utils.py')},\n            'fastkafka.encoder': {'fastkafka.encoder.dummy': ('encoder_export.html#dummy', 'fastkafka/encoder.py')},\n            'fastkafka.executors': 
{'fastkafka.executors.dummy': ('application_executors_export.html#dummy', 'fastkafka/executors.py')},\n            'fastkafka.testing': {'fastkafka.testing.dummy': ('testing_export.html#dummy', 'fastkafka/testing.py')}}}\n"
  },
  {
    "path": "fastkafka/_server.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/021_FastKafkaServer.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'ServerProcess', 'run_fastkafka_server_process', 'run_fastkafka_server', 'run_in_process']\n\n# %% ../nbs/021_FastKafkaServer.ipynb 1\nimport asyncio\nimport multiprocessing\nimport platform\nimport signal\nimport threading\nfrom contextlib import contextmanager\nfrom typing import *\nfrom types import FrameType\n\nimport asyncer\nimport typer\n\nfrom ._components.helpers import _import_from_string\nfrom ._components.logger import get_logger\nfrom ._components._subprocess import terminate_asyncio_process\n\n# %% ../nbs/021_FastKafkaServer.ipynb 5\nlogger = get_logger(__name__, level=20)\n\n# %% ../nbs/021_FastKafkaServer.ipynb 7\nclass ServerProcess:\n    def __init__(self, app: str, kafka_broker_name: str):\n        \"\"\"\n        Represents a server process for running the FastKafka application.\n\n        Args:\n            app (str): Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\n            kafka_broker_name (str): The name of the Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\n        \"\"\"\n        self.app = app\n        self.should_exit = False\n        self.kafka_broker_name = kafka_broker_name\n\n    def run(self) -> None:\n        \"\"\"\n        Runs the FastKafka application server process.\n        \"\"\"\n        return asyncio.run(self._serve())\n\n    async def _serve(self) -> None:\n        \"\"\"\n        Internal method that runs the FastKafka application server.\n        \"\"\"\n        self._install_signal_handlers()\n\n        self.application = _import_from_string(self.app)\n        self.application.set_kafka_broker(self.kafka_broker_name)\n\n        async with self.application:\n            await self._main_loop()\n\n    def _install_signal_handlers(self) -> None:\n       
 \"\"\"\n        Installs signal handlers for handling termination signals.\n        \"\"\"\n        if threading.current_thread() is not threading.main_thread():\n            raise RuntimeError()\n\n        loop = asyncio.get_event_loop()\n\n        HANDLED_SIGNALS = (\n            signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.\n            signal.SIGTERM,  # Unix signal 15. Sent by `kill <pid>`.\n        )\n        if platform.system() == \"Windows\":\n            HANDLED_SIGNALS = (*HANDLED_SIGNALS, signal.SIGBREAK)  # type: ignore\n\n        def handle_windows_exit(signum: int, frame: Optional[FrameType]) -> None:\n            self.should_exit = True\n\n        def handle_exit(sig: int) -> None:\n            self.should_exit = True\n\n        for sig in HANDLED_SIGNALS:\n            if platform.system() == \"Windows\":\n                signal.signal(sig, handle_windows_exit)\n            else:\n                loop.add_signal_handler(sig, handle_exit, sig)\n\n    async def _main_loop(self) -> None:\n        \"\"\"\n        Main loop for the FastKafka application server process.\n        \"\"\"\n        while not self.should_exit:\n            await asyncio.sleep(0.1)\n\n# %% ../nbs/021_FastKafkaServer.ipynb 8\n_app = typer.Typer()\n\n\n@_app.command()\ndef run_fastkafka_server_process(\n    app: str = typer.Argument(\n        ...,\n        help=\"Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\",\n    ),\n    kafka_broker: str = typer.Option(\n        ...,\n        help=\"Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\",\n    ),\n) -> None:\n    ServerProcess(app, kafka_broker).run()\n\n# %% ../nbs/021_FastKafkaServer.ipynb 11\nasync def run_fastkafka_server(num_workers: int, app: str, kafka_broker: str) -> None:\n    \"\"\"\n    Runs the FastKafka server with multiple worker processes.\n\n    Args:\n        num_workers 
(int): Number of FastKafka instances to run.\n        app (str): Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\n        kafka_broker (str): Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\n    \"\"\"\n    loop = asyncio.get_event_loop()\n\n    HANDLED_SIGNALS = (\n        signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.\n        signal.SIGTERM,  # Unix signal 15. Sent by `kill <pid>`.\n    )\n    if platform.system() == \"Windows\":\n        HANDLED_SIGNALS = (*HANDLED_SIGNALS, signal.SIGBREAK)  # type: ignore\n\n    d = {\"should_exit\": False}\n\n    def handle_windows_exit(\n        signum: int, frame: Optional[FrameType], d: Dict[str, bool] = d\n    ) -> None:\n        d[\"should_exit\"] = True\n\n    def handle_exit(sig: int, d: Dict[str, bool] = d) -> None:\n        d[\"should_exit\"] = True\n\n    for sig in HANDLED_SIGNALS:\n        if platform.system() == \"Windows\":\n            signal.signal(sig, handle_windows_exit)\n        else:\n            loop.add_signal_handler(sig, handle_exit, sig)\n\n    async with asyncer.create_task_group() as tg:\n        args = [\n            \"run_fastkafka_server_process\",\n            \"--kafka-broker\",\n            kafka_broker,\n            app,\n        ]\n        tasks = [\n            tg.soonify(asyncio.create_subprocess_exec)(\n                *args,\n                limit=1024 * 1024,  # Set StreamReader buffer limit to 1MB\n                stdout=asyncio.subprocess.PIPE,\n                stdin=asyncio.subprocess.PIPE,\n            )\n            for i in range(num_workers)\n        ]\n\n    procs = [task.value for task in tasks]\n\n    async def log_output(\n        output: Optional[asyncio.StreamReader], pid: int, d: Dict[str, bool] = d\n    ) -> None:\n        if output is None:\n            raise RuntimeError(\"Expected StreamReader, got None. 
Is stdout piped?\")\n        while not output.at_eof():\n            try:\n                outs = await output.readline()\n            except ValueError:\n                typer.echo(f\"[{pid:03d}]: Failed to read log output\", nl=False)\n                continue\n            if outs != b\"\":\n                typer.echo(f\"[{pid:03d}]: \" + outs.decode(\"utf-8\").strip(), nl=False)\n\n    async with asyncer.create_task_group() as tg:\n        for proc in procs:\n            tg.soonify(log_output)(proc.stdout, proc.pid)\n\n        while not d[\"should_exit\"]:\n            await asyncio.sleep(0.2)\n\n        typer.echo(\"Starting process cleanup, this may take a few seconds...\")\n        for proc in procs:\n            tg.soonify(terminate_asyncio_process)(proc)\n\n    for proc in procs:\n        output, _ = await proc.communicate()\n        if output:\n            typer.echo(f\"[{proc.pid:03d}]: \" + output.decode(\"utf-8\").strip(), nl=False)\n\n    returncodes = [proc.returncode for proc in procs]\n    if not returncodes == [0] * len(procs):\n        typer.secho(\n            f\"Return codes are not all zero: {returncodes}\",\n            err=True,\n            fg=typer.colors.RED,\n        )\n        raise typer.Exit(1)\n\n# %% ../nbs/021_FastKafkaServer.ipynb 12\n@contextmanager\ndef run_in_process(\n    target: Callable[..., Any]\n) -> Generator[multiprocessing.Process, None, None]:\n    \"\"\"\n    Runs the target function in a separate process.\n\n    Args:\n        target (Callable[..., Any]): The function to run in a separate process.\n\n    Yields:\n        Generator[multiprocessing.Process, None, None]: A generator that yields the process object.\n    \"\"\"\n    p = multiprocessing.Process(target=target)\n    try:\n        p.start()\n        yield p\n    except Exception as e:\n        print(f\"Exception raised {e=}\")\n    finally:\n        p.terminate()\n        p.join()\n"
  },
  {
    "path": "fastkafka/_testing/__init__.py",
    "content": ""
  },
  {
    "path": "fastkafka/_testing/apache_kafka_broker.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/002_ApacheKafkaBroker.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'T', 'get_zookeeper_config_string', 'get_kafka_config_string', 'ApacheKafkaBroker', 'run_and_match',\n           'is_port_in_use', 'get_free_port', 'write_config_and_run', 'start_apache_kafka_brokers']\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 1\nimport asyncio\nimport re\nimport platform\nimport socket\nimport subprocess  # nosec\nfrom contextlib import asynccontextmanager\nfrom datetime import datetime, timedelta\nfrom os import environ\nfrom pathlib import Path\nfrom tempfile import TemporaryDirectory\nfrom typing import *\n\nimport asyncer\nimport nest_asyncio\n\nfrom .._components._subprocess import terminate_asyncio_process\nfrom .._components.helpers import in_notebook\nfrom .._components.logger import get_logger\nfrom .._components.meta import delegates, export, filter_using_signature, patch\nfrom .._components.test_dependencies import check_java, check_kafka\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 3\nif in_notebook():\n    from tqdm.notebook import tqdm\nelse:\n    from tqdm import tqdm\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 8\ndef get_zookeeper_config_string(\n    data_dir: Union[str, Path],  # the directory where the snapshot is stored.\n    zookeeper_port: int = 2181,  # the port at which the clients will connect\n) -> str:\n    \"\"\"Generates a zookeeeper configuration string that can be exported to file\n    and used to start a zookeeper instance.\n\n    Args:\n        data_dir: Path to the directory where the zookeepeer instance will save data\n        zookeeper_port: Port for clients (Kafka brokes) to connect\n    Returns:\n        Zookeeper configuration string.\n\n    \"\"\"\n    zookeeper_data_dir = str((Path(data_dir) / \"zookeeper\").resolve())\n    if platform.system() == \"Windows\":\n        zookeeper_data_dir = 
zookeeper_data_dir.replace(\"\\\\\", \"/\")\n    zookeeper_config = f\"\"\"dataDir={zookeeper_data_dir}\nclientPort={zookeeper_port}\nmaxClientCnxns=0\nadmin.enableServer=false\n\"\"\"\n\n    return zookeeper_config\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 10\ndef get_kafka_config_string(\n    data_dir: Union[str, Path], zookeeper_port: int = 2181, listener_port: int = 9092\n) -> str:\n    \"\"\"Generates a kafka broker configuration string that can be exported to file\n    and used to start a kafka broker instance.\n\n    Args:\n        data_dir: Path to the directory where the kafka broker instance will save data\n        zookeeper_port: Port on which the zookeeper instance is running\n        listener_port: Port on which the clients (producers and consumers) can connect\n    Returns:\n        Kafka broker configuration string.\n\n    \"\"\"\n    kafka_logs_dir = str((Path(data_dir) / \"kafka_logs\").resolve())\n    if platform.system() == \"Windows\":\n        kafka_logs_dir = kafka_logs_dir.replace(\"\\\\\", \"/\")\n    kafka_config = f\"\"\"broker.id=0\n\n############################# Socket Server Settings #############################\n\n# The address the socket server listens on. If not configured, the host name will be equal to the value of\n# java.net.InetAddress.getCanonicalHostName(), with PLAINTEXT listener name, and port 9092.\n#   FORMAT:\n#     listeners = listener_name://host_name:port\n#   EXAMPLE:\n#     listeners = PLAINTEXT://your.host.name:9092\nlisteners=PLAINTEXT://:{listener_port}\n\n# Listener name, hostname and port the broker will advertise to clients.\n# If not set, it uses the value for \"listeners\".\n# advertised.listeners=PLAINTEXT://localhost:{listener_port}\n\n# Maps listener names to security protocols, the default is for them to be the same. 
See the config documentation for more details\n#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL\n\n# The number of threads that the server uses for receiving requests from the network and sending responses to the network\nnum.network.threads=3\n\n# The number of threads that the server uses for processing requests, which may include disk I/O\nnum.io.threads=8\n\n# The send buffer (SO_SNDBUF) used by the socket server\nsocket.send.buffer.bytes=102400\n\n# The receive buffer (SO_RCVBUF) used by the socket server\nsocket.receive.buffer.bytes=102400\n\n# The maximum size of a request that the socket server will accept (protection against OOM)\nsocket.request.max.bytes=104857600\n\n\n############################# Log Basics #############################\n\n# A comma separated list of directories under which to store log files\nlog.dirs={kafka_logs_dir}\n\n# The default number of log partitions per topic. More partitions allow greater\n# parallelism for consumption, but this will also result in more files across\n# the brokers.\nnum.partitions=1\n\n# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown.\n# This value is recommended to be increased for installations with data dirs located in RAID array.\nnum.recovery.threads.per.data.dir=1\n\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\n# The number of messages to accept before forcing a flush of data to disk\nlog.flush.interval.messages=10000\n\n# The maximum amount of time a message can sit in a log before we force a flush\nlog.flush.interval.ms=1000\n\n# The minimum age of a log file to be eligible for deletion due to age\nlog.retention.hours=168\n\n# A size-based retention policy for logs. Segments are pruned from the log unless the remaining\n# segments drop below log.retention.bytes. 
Functions independently of log.retention.hours.\nlog.retention.bytes=1073741824\n\n# The maximum size of a log segment file. When this size is reached a new log segment will be created.\nlog.segment.bytes=1073741824\n\n# The interval at which log segments are checked to see if they can be deleted according to the retention policies\nlog.retention.check.interval.ms=300000\n\n# Zookeeper connection string (see zookeeper docs for details).\nzookeeper.connect=localhost:{zookeeper_port}\n\n# Timeout in ms for connecting to zookeeper\nzookeeper.connection.timeout.ms=18000\n\n# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance.\ngroup.initial.rebalance.delay.ms=0\n\"\"\"\n\n    return kafka_config\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 12\n@export(\"fastkafka.testing\")\nclass ApacheKafkaBroker:\n    \"\"\"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.\"\"\"\n\n    @delegates(get_kafka_config_string, but=[\"data_dir\"])\n    @delegates(get_zookeeper_config_string, keep=True, but=[\"data_dir\"])\n    def __init__(\n        self,\n        topics: Iterable[str] = [],\n        *,\n        retries: int = 3,\n        apply_nest_asyncio: bool = False,\n        **kwargs: Dict[str, Any],\n    ):\n        \"\"\"Initialises the ApacheKafkaBroker object\n\n        Args:\n            topics: List of topics to create after sucessfull Kafka broker startup\n            retries: Number of retries to create kafka and zookeeper services using random\n            apply_nest_asyncio: set to True if running in notebook\n            zookeeper_port: Port for clients (Kafka brokes) to connect\n            listener_port: Port on which the clients (producers and consumers) can connect\n        \"\"\"\n        self.zookeeper_kwargs = filter_using_signature(\n            get_zookeeper_config_string, **kwargs\n        )\n        self.kafka_kwargs = 
filter_using_signature(get_kafka_config_string, **kwargs)\n\n        if \"zookeeper_port\" not in self.zookeeper_kwargs:\n            self.zookeeper_kwargs[\"zookeeper_port\"] = 2181\n            self.kafka_kwargs[\"zookeeper_port\"] = 2181\n\n        if \"listener_port\" not in self.kafka_kwargs:\n            self.kafka_kwargs[\"listener_port\"] = 9092\n\n        self.retries = retries\n        self.apply_nest_asyncio = apply_nest_asyncio\n        self.temporary_directory: Optional[TemporaryDirectory] = None\n        self.temporary_directory_path: Optional[Path] = None\n        self.kafka_task: Optional[asyncio.subprocess.Process] = None\n        self.zookeeper_task: Optional[asyncio.subprocess.Process] = None\n        self._is_started = False\n        self.topics: Iterable[str] = topics\n\n    @property\n    def is_started(self) -> bool:\n        \"\"\"Property indicating whether the ApacheKafkaBroker object is started.\n\n        The is_started property indicates if the ApacheKafkaBroker object is currently\n        in a started state. 
This implies that Zookeeper and Kafka broker processes have\n        sucesfully started and are ready for handling events.\n\n        Returns:\n            bool: True if the object is started, False otherwise.\n        \"\"\"\n        return self._is_started\n\n    @classmethod\n    def _check_deps(cls) -> None:\n        \"\"\"Prepares the environment for running Kafka brokers.\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    async def _start(self) -> str:\n        \"\"\"Starts a local kafka broker and zookeeper instance asynchronously\n        Returns:\n           Kafka broker bootstrap server address in string format: add:port\n        \"\"\"\n        raise NotImplementedError\n\n    def start(self) -> str:\n        \"\"\"Starts a local kafka broker and zookeeper instance synchronously\n        Returns:\n           Kafka broker bootstrap server address in string format: add:port\n        \"\"\"\n        raise NotImplementedError\n\n    def stop(self) -> None:\n        \"\"\"Stops a local kafka broker and zookeeper instance synchronously\"\"\"\n        raise NotImplementedError\n\n    async def _stop(self) -> None:\n        \"\"\"Stops a local kafka broker and zookeeper instance synchronously\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    def get_service_config_string(self, service: str, *, data_dir: Path) -> str:\n        \"\"\"Generates a configuration for a service\n        Args:\n            data_dir: Path to the directory where the zookeepeer instance will save data\n            service: \"kafka\" or \"zookeeper\", defines which service to get config string for\n        \"\"\"\n        raise NotImplementedError\n\n    async def _start_service(self, service: str = \"kafka\") -> None:\n        \"\"\"Starts the service according to defined service var\n        Args:\n            service: \"kafka\" or \"zookeeper\", defines which service to start\n        \"\"\"\n        
raise NotImplementedError\n\n    async def _start_zookeeper(self) -> None:\n        \"\"\"Start a local zookeeper instance\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    async def _start_kafka(self) -> None:\n        \"\"\"Start a local kafka broker\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    async def _create_topics(self) -> None:\n        \"\"\"Create missing topics in local Kafka broker\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    def __enter__(self) -> str:\n        #         ApacheKafkaBroker._check_deps()\n        return self.start()\n\n    def __exit__(self, *args: Any, **kwargs: Any) -> None:\n        self.stop()\n\n    async def __aenter__(self) -> str:\n        #         ApacheKafkaBroker._check_deps()\n        return await self._start()\n\n    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\n        await self._stop()\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 14\n@patch(cls_method=True)  # type: ignore\ndef _check_deps(cls: ApacheKafkaBroker) -> None:\n    \"\"\"Checks the dependencies required to run Apache KafkaBroker.\n\n    Raises:\n        RuntimeError: If JDK installation or Kafka installation is not found.\n    \"\"\"\n    if not check_java():\n        raise RuntimeError(\n            \"JDK installation not found! Please install JDK manually or run 'fastkafka testing install_deps'.\"\n        )\n    if not check_kafka():\n        raise RuntimeError(\n            \"Kafka installation not found! 
Please install Kafka tools manually or run 'fastkafka testing install_deps'.\"\n        )\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 17\nasync def run_and_match(\n    *args: str,\n    capture: str = \"stdout\",\n    timeout: int = 5,\n    pattern: str,\n    num_to_match: int = 1,\n) -> asyncio.subprocess.Process:\n    \"\"\"Runs a command asynchronously and matches the output against a pattern.\n\n    Args:\n        *args: Command-line arguments for the subprocess.\n        capture: Which output to capture (\"stdout\" or \"stderr\").\n        timeout: Timeout in seconds for reading the output.\n        pattern: Regular expression pattern to match in the output.\n        num_to_match: Number of matches to wait for.\n\n    Returns:\n        The subprocess process object.\n\n    Raises:\n        ValueError: If the capture parameter has an unsupported value.\n        TimeoutError: If the process times out.\n        RuntimeError: If the process returns a non-zero return code.\n    \"\"\"\n    # Create the subprocess; redirect the standard output\n    # into a pipe.\n    matched = 0\n\n    if platform.system() == \"Windows\":\n        proc = await asyncio.create_subprocess_shell(\n            \" \".join(args),\n            stdout=asyncio.subprocess.PIPE,\n            stderr=asyncio.subprocess.PIPE,\n            creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,  # type: ignore\n        )\n    else:\n        proc = await asyncio.create_subprocess_exec(\n            *args,\n            stdout=asyncio.subprocess.PIPE,\n            stderr=asyncio.subprocess.PIPE,\n        )\n\n    # Read one line of output.\n    t = datetime.now()\n    while datetime.now() - t < timedelta(seconds=timeout):\n        try:\n            if capture == \"stdout\":\n                data = await asyncio.wait_for(proc.stdout.readline(), timeout=1.0)  # type: ignore\n            elif capture == \"stderr\":\n                data = await asyncio.wait_for(proc.stderr.readline(), timeout=1.0)  # type: 
ignore\n            else:\n                raise ValueError(\n                    f\"Unknown capture param value {capture}, supported values are 'stdout', 'stderr'\"\n                )\n            ddata = data.decode(\"utf-8\")\n\n            if len(re.findall(pattern, ddata)) > 0:\n                # print(f\"Matched: {ddata}\")\n                matched += 1\n                if matched == num_to_match:\n                    return proc\n        except asyncio.exceptions.TimeoutError as e:\n            pass\n\n        if proc.returncode is not None:\n            stdout, stderr = await proc.communicate()\n            dstdout = stdout.decode(\"utf-8\")\n            dstderr = stderr.decode(\"utf-8\")\n            if proc.returncode == 0:\n                raise TimeoutError(\n                    f\"stdout={dstdout}, stderr={dstderr}, returncode={proc.returncode}\"\n                )\n            else:\n                raise RuntimeError(\n                    f\"stdout={dstdout}, stderr={dstderr}, returncode={proc.returncode}\"\n                )\n\n    await terminate_asyncio_process(proc)\n\n    raise TimeoutError()\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 20\ndef is_port_in_use(port: Union[int, str]) -> bool:\n    \"\"\"\n    Checks if a port is already in use.\n\n    Args:\n        port (Union[int, str]): The port number to check. 
It can be provided as an integer or a string.\n\n    Returns:\n        bool: True if the port is in use, False otherwise.\n    \"\"\"\n    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n        return s.connect_ex((\"localhost\", int(port))) == 0\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 22\ndef get_free_port() -> str:\n    \"\"\"Gets a port number which is available and free in the system.\n\n    Returns:\n        The free port number as a string.\n    \"\"\"\n    s = socket.socket()\n    s.bind((\"127.0.0.1\", 0))\n    port = str(s.getsockname()[1])\n    s.close()\n    return port\n\n\nasync def write_config_and_run(\n    config: str, config_path: Union[str, Path], run_cmd: str\n) -> asyncio.subprocess.Process:\n    \"\"\"Writes the configuration to a file, and runs a command using the configuration.\n\n    Args:\n        config: The configuration string.\n        config_path: Path to the configuration file.\n        run_cmd: The command to run.\n\n    Returns:\n        The subprocess process object.\n    \"\"\"\n    with open(config_path, \"w\") as f:\n        f.write(config)\n\n    return await asyncio.create_subprocess_exec(\n        run_cmd,\n        config_path,\n        stdout=asyncio.subprocess.PIPE,\n        stdin=asyncio.subprocess.PIPE,\n    )\n\n\n@patch\ndef get_service_config_string(\n    self: ApacheKafkaBroker, service: str, *, data_dir: Path\n) -> str:\n    \"\"\"Gets the configuration string for a service.\n\n    Args:\n        service: Name of the service (\"kafka\" or \"zookeeper\").\n        data_dir: Path to the directory where the service will save data.\n\n    Returns:\n        The service configuration string.\n    \"\"\"\n    service_kwargs = getattr(self, f\"{service}_kwargs\")\n    if service == \"kafka\":\n        return get_kafka_config_string(data_dir=data_dir, **service_kwargs)\n    else:\n        return get_zookeeper_config_string(data_dir=data_dir, **service_kwargs)\n\n\n@patch\nasync def _start_service(self: 
ApacheKafkaBroker, service: str = \"kafka\") -> None:\n    \"\"\"Starts a service (kafka or zookeeper) asynchronously.\n\n    Args:\n        service: Name of the service (\"kafka\" or \"zookeeper\").\n    \"\"\"\n    logger.info(f\"Starting {service}...\")\n\n    if self.temporary_directory_path is None:\n        raise ValueError(\n            \"ApacheKafkaBroker._start_service(): self.temporary_directory_path is None, did you initialise it?\"\n        )\n\n    configs_tried: List[Dict[str, Any]] = []\n\n    for i in range(self.retries + 1):\n        configs_tried = configs_tried + [getattr(self, f\"{service}_kwargs\").copy()]\n\n        service_config_path = self.temporary_directory_path / f\"{service}.properties\"\n\n        with open(service_config_path, \"w\") as f:\n            f.write(\n                self.get_service_config_string(\n                    service, data_dir=self.temporary_directory_path\n                )\n            )\n\n        try:\n            port = (\n                self.zookeeper_kwargs[\"zookeeper_port\"]\n                if service == \"zookeeper\"\n                else self.kafka_kwargs[\"listener_port\"]\n            )\n            if is_port_in_use(port):\n                raise ValueError(f\"Port {port} is already in use\")\n\n            script_extension = \"bat\" if platform.system() == \"Windows\" else \"sh\"\n            service_start_script = f\"{service}-server-start.{script_extension}\"\n            service_task = await run_and_match(\n                service_start_script,\n                str(service_config_path),\n                pattern=\"Recorded new controller, from now on will use node\"\n                if service == \"kafka\"\n                else \"INFO Snapshot taken\",\n                timeout=30,\n            )\n        except Exception as e:\n            print(e)\n            logger.info(\n                f\"{service} startup failed, generating a new port and retrying...\"\n            )\n            port = 
get_free_port()\n            if service == \"zookeeper\":\n                self.zookeeper_kwargs[\"zookeeper_port\"] = port\n                self.kafka_kwargs[\"zookeeper_port\"] = port\n            else:\n                self.kafka_kwargs[\"listener_port\"] = port\n\n            logger.info(f\"{service} new port={port}\")\n        else:\n            setattr(self, f\"{service}_task\", service_task)\n            return\n\n    raise ValueError(f\"Could not start {service} with params: {configs_tried}\")\n\n\n@patch\nasync def _start_kafka(self: ApacheKafkaBroker) -> None:\n    \"\"\"Starts a local Kafka broker asynchronously.\"\"\"\n    return await self._start_service(\"kafka\")\n\n\n@patch\nasync def _start_zookeeper(self: ApacheKafkaBroker) -> None:\n    \"\"\"Starts a local ZooKeeper instance asynchronously.\"\"\"\n    return await self._start_service(\"zookeeper\")\n\n\n@patch\nasync def _create_topics(self: ApacheKafkaBroker) -> None:\n    \"\"\"Creates missing topics in a local Kafka broker asynchronously.\"\"\"\n    listener_port = self.kafka_kwargs.get(\"listener_port\", 9092)\n    bootstrap_server = f\"127.0.0.1:{listener_port}\"\n\n    script_extension = \"bat\" if platform.system() == \"Windows\" else \"sh\"\n    topics_script = f\"kafka-topics.{script_extension}\"\n    async with asyncer.create_task_group() as tg:\n        processes = [\n            tg.soonify(asyncio.create_subprocess_exec)(\n                topics_script,\n                \"--create\",\n                f\"--topic={topic}\",\n                f\"--bootstrap-server={bootstrap_server}\",\n                stdout=asyncio.subprocess.PIPE,\n                stdin=asyncio.subprocess.PIPE,\n            )\n            for topic in self.topics\n        ]\n\n    try:\n        return_values = [\n            await asyncio.wait_for(process.value.wait(), 30) for process in processes\n        ]\n        if any(return_value != 0 for return_value in return_values):\n            raise ValueError(\"Could not 
create missing topics!\")\n    except asyncio.TimeoutError as _:\n        raise ValueError(\"Timed out while creating missing topics!\")\n\n\n@patch\nasync def _start(self: ApacheKafkaBroker) -> str:\n    \"\"\"Starts a local Kafka broker and ZooKeeper instance asynchronously.\n\n    Returns:\n        The Kafka broker bootstrap server address in string format: host:port.\n    \"\"\"\n    self._check_deps()\n\n    self.temporary_directory = TemporaryDirectory()\n    self.temporary_directory_path = Path(self.temporary_directory.__enter__())\n\n    await self._start_zookeeper()\n    await self._start_kafka()\n\n    listener_port = self.kafka_kwargs.get(\"listener_port\", 9092)\n    bootstrap_server = f\"127.0.0.1:{listener_port}\"\n    logger.info(f\"Local Kafka broker up and running on {bootstrap_server}\")\n\n    await self._create_topics()\n\n    self._is_started = True\n\n    return bootstrap_server\n\n\n@patch\nasync def _stop(self: ApacheKafkaBroker) -> None:\n    \"\"\"Stops a local Kafka broker and ZooKeeper instance asynchronously.\"\"\"\n    await terminate_asyncio_process(self.kafka_task)  # type: ignore\n    await terminate_asyncio_process(self.zookeeper_task)  # type: ignore\n    self.temporary_directory.__exit__(None, None, None)  # type: ignore\n    self._is_started = False\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 25\n@patch\ndef start(self: ApacheKafkaBroker) -> str:\n    \"\"\"Starts a local Kafka broker and ZooKeeper instance synchronously.\n\n    Returns:\n        The Kafka broker bootstrap server address in string format: host:port.\n    \"\"\"\n    logger.info(f\"{self.__class__.__name__}.start(): entering...\")\n    try:\n        # get or create loop\n        try:\n            loop = asyncio.get_event_loop()\n        except RuntimeError as e:\n            logger.warning(\n                f\"{self.__class__.__name__}.start(): RuntimeError raised when calling asyncio.get_event_loop(): {e}\"\n            )\n            logger.warning(\n         
       f\"{self.__class__.__name__}.start(): asyncio.new_event_loop()\"\n            )\n            loop = asyncio.new_event_loop()\n\n        # start zookeeper and kafka broker in the loop\n\n        if loop.is_running():\n            if self.apply_nest_asyncio:\n                logger.warning(\n                    f\"{self.__class__.__name__}.start(): ({loop}) is already running!\"\n                )\n                logger.warning(\n                    f\"{self.__class__.__name__}.start(): calling nest_asyncio.apply()\"\n                )\n                nest_asyncio.apply(loop)\n            else:\n                msg = f\"{self.__class__.__name__}.start(): ({loop}) is already running! Use 'apply_nest_asyncio=True' when creating 'ApacheKafkaBroker' to prevent this.\"\n                logger.error(msg)\n                raise RuntimeError(msg)\n\n        retval = loop.run_until_complete(self._start())\n        logger.info(f\"{self.__class__}.start(): returning {retval}\")\n        return retval\n    finally:\n        logger.info(f\"{self.__class__.__name__}.start(): exited.\")\n\n\n@patch\ndef stop(self: ApacheKafkaBroker) -> None:\n    \"\"\"Stops a local kafka broker and zookeeper instance synchronously\"\"\"\n    logger.info(f\"{self.__class__.__name__}.stop(): entering...\")\n    try:\n        if not self._is_started:\n            raise RuntimeError(\n                \"ApacheKafkaBroker not started yet, please call ApacheKafkaBroker.start() before!\"\n            )\n\n        loop = asyncio.get_event_loop()\n        loop.run_until_complete(self._stop())\n    finally:\n        logger.info(f\"{self.__class__.__name__}.stop(): exited.\")\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 30\nasync def _start_broker(broker: Any) -> Union[Any, Exception]:\n    try:\n        await broker._start()\n        return broker\n    except Exception as e:\n        return e\n\n\nasync def _stop_broker(broker: Any) -> Union[Any, Exception]:\n    try:\n        await 
broker._stop()\n        return broker\n    except Exception as e:\n        return e\n\n\nasync def _get_unique_local_brokers_to_start(\n    kafka_brokers_name: str,\n    kafka_brokers: List[Dict[str, Dict[str, Any]]],\n    duplicate_ok: bool = False,\n    zookeeper_ports: List[int] = [2181],\n    ignore_nonlocal_brokers: bool = False,\n) -> List[Tuple[str, int]]:\n    brokers_to_start = [\n        x[kafka_brokers_name] for x in kafka_brokers if kafka_brokers_name in x\n    ]\n    unique_brokers_to_start = set([(x[\"url\"], x[\"port\"]) for x in brokers_to_start])\n\n    if len(unique_brokers_to_start) < len(brokers_to_start) and not duplicate_ok:\n        raise ValueError(\n            f\"Duplicate kafka_brokers are found - {brokers_to_start}. Please change values or use 'duplicate_ok=True'\"\n        )\n\n    unique_urls = set([x[0] for x in unique_brokers_to_start])\n    localhost_urls = set(\n        [\n            \"localhost\",\n            \"127.0.0.1\",\n            \"0.0.0.0\",  # nosec: B104 - Possible binding to all interfaces\n        ]\n    )\n    if not unique_urls.issubset(localhost_urls) and not ignore_nonlocal_brokers:\n        raise ValueError(\n            f\"URL values other than {', '.join(sorted(localhost_urls))} are found - {unique_urls - localhost_urls}. 
Please change values or use 'ignore_nonlocal_brokers=True'\"\n        )\n\n    unique_local_brokers_to_start = [\n        x for x in unique_brokers_to_start if x[0] in localhost_urls\n    ]\n    return unique_local_brokers_to_start\n\n# %% ../../nbs/002_ApacheKafkaBroker.ipynb 31\nT = TypeVar(\"T\")\n\n\n@asynccontextmanager\nasync def _start_and_stop_brokers(brokers: List[T]) -> AsyncIterator[None]:\n    try:\n        retvals = [await _start_broker(broker) for broker in brokers]\n        exceptions = [x for x in retvals if isinstance(x, Exception)]\n\n        if exceptions:\n            raise RuntimeError(exceptions)\n\n        yield\n    finally:\n        retvals = [\n            await _stop_broker(broker)\n            for broker in retvals\n            if not isinstance(broker, Exception)\n        ]\n        exceptions = [x for x in retvals if isinstance(x, Exception)]\n\n        if exceptions:\n            raise RuntimeError(exceptions)\n\n\n@asynccontextmanager\nasync def start_apache_kafka_brokers(\n    kafka_brokers_name: str,\n    kafka_brokers: List[Dict[str, Dict[str, Any]]],\n    duplicate_ok: bool = False,\n    zookeeper_ports: List[int] = [2181],\n    ignore_nonlocal_brokers: bool = False,\n) -> AsyncIterator[None]:\n    unique_local_brokers_to_start = await _get_unique_local_brokers_to_start(\n        kafka_brokers_name=kafka_brokers_name,\n        kafka_brokers=kafka_brokers,\n        duplicate_ok=duplicate_ok,\n        ignore_nonlocal_brokers=ignore_nonlocal_brokers,\n    )\n\n    if len(zookeeper_ports) < len(unique_local_brokers_to_start):\n        raise ValueError(\n            f\"Atleast {len(unique_local_brokers_to_start)} zookeeper ports are needed to start kafka. 
Current zookeeper_ports length is {len(zookeeper_ports)}\"\n        )\n\n    brokers = [\n        ApacheKafkaBroker(listener_port=broker[1], zookeeper_port=zookeeper_port)  # type: ignore\n        for broker, zookeeper_port in zip(\n            unique_local_brokers_to_start, zookeeper_ports\n        )\n    ]\n\n    async with _start_and_stop_brokers(brokers=brokers):\n        yield\n"
  },
  {
    "path": "fastkafka/_testing/in_memory_broker.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/001_InMemoryBroker.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'KafkaRecord', 'KafkaPartition', 'KafkaTopic', 'split_list', 'GroupMetadata', 'InMemoryBroker',\n           'InMemoryConsumer', 'InMemoryProducer', 'MockBatch']\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 1\nimport asyncio\nimport hashlib\nimport random\nimport string\nimport uuid\nfrom contextlib import contextmanager\nfrom dataclasses import dataclass, field\nfrom typing import *\nfrom types import ModuleType\n\nfrom aiokafka.structs import ConsumerRecord, RecordMetadata, TopicPartition\n\nimport fastkafka._components.aiokafka_consumer_loop\nimport fastkafka._aiokafka_imports\nfrom .._aiokafka_imports import AIOKafkaConsumer, AIOKafkaProducer\nfrom .._components.logger import get_logger\nfrom fastkafka._components.meta import (\n    _get_default_kwargs_from_sig,\n    classcontextmanager,\n    delegates,\n    patch,\n)\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 3\nlogger = get_logger(__name__)\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 6\n@dataclass\nclass KafkaRecord:\n    topic: str = \"\"\n    partition: int = 0\n    key: Optional[bytes] = None\n    value: bytes = b\"\"\n    offset: int = 0\n    timestamp = 0\n    timestamp_type = 0\n    checksum = 0\n    serialized_key_size = 0\n    serialized_value_size = 0\n    headers: Sequence[Tuple[str, bytes]] = field(default_factory=list)\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 7\nclass KafkaPartition:\n    def __init__(self, *, partition: int, topic: str):\n        \"\"\"\n        Initialize a KafkaPartition object.\n\n        Args:\n            partition: The partition number.\n            topic: The topic name.\n        \"\"\"\n        self.partition = partition\n        self.topic = topic\n        self.messages: List[KafkaRecord] = list()\n\n    def write(self, value: bytes, key: Optional[bytes] = None) -> RecordMetadata:  # type: ignore\n        \"\"\"\n        Write a Kafka record 
to the partition.\n\n        Args:\n            value: The value of the record.\n            key: The key of the record.\n\n        Returns:\n            The record metadata.\n        \"\"\"\n        record = KafkaRecord(\n            topic=self.topic,\n            partition=self.partition,\n            value=value,\n            key=key,\n            offset=len(self.messages),\n        )\n        record_meta = RecordMetadata(\n            topic=self.topic,\n            partition=self.partition,\n            topic_partition=TopicPartition(topic=self.topic, partition=self.partition),\n            offset=len(self.messages),\n            timestamp=1680602752070,\n            timestamp_type=0,\n            log_start_offset=0,\n        )\n        self.messages.append(record)\n        return record_meta\n\n    def read(self, offset: int) -> Tuple[List[KafkaRecord], int]:\n        \"\"\"\n        Read Kafka records from the partition starting from the given offset.\n\n        Args:\n            offset: The starting offset.\n\n        Returns:\n            A tuple containing the list of records and the current offset.\n        \"\"\"\n        return self.messages[offset:], len(self.messages)\n\n    def latest_offset(self) -> int:\n        \"\"\"\n        Get the latest offset of the partition.\n\n        Returns:\n            The latest offset.\n        \"\"\"\n        return len(self.messages)\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 11\nclass KafkaTopic:\n    def __init__(self, topic: str, num_partitions: int = 1):\n        \"\"\"\n        Initialize a KafkaTopic object.\n\n        Args:\n            topic: The topic name.\n            num_partitions: The number of partitions in the topic (default: 1).\n        \"\"\"\n        self.topic = topic\n        self.num_partitions = num_partitions\n        self.partitions: List[KafkaPartition] = [\n            KafkaPartition(topic=topic, partition=partition_index)\n            for partition_index in range(num_partitions)\n     
   ]\n\n    def read(  # type: ignore\n        self, partition: int, offset: int\n    ) -> Tuple[TopicPartition, List[KafkaRecord], int]:\n        \"\"\"\n        Read records from the specified partition and offset.\n\n        Args:\n            partition: The partition index.\n            offset: The offset from which to start reading.\n\n        Returns:\n            A tuple containing the topic partition, list of Kafka records, and the new offset.\n        \"\"\"\n        topic_partition = TopicPartition(topic=self.topic, partition=partition)\n        records, offset = self.partitions[partition].read(offset)\n        return topic_partition, records, offset\n\n    def write_with_partition(  # type: ignore\n        self,\n        value: bytes,\n        partition: int,\n    ) -> RecordMetadata:\n        \"\"\"\n        Write a record with a specified partition.\n\n        Args:\n            value: The value of the record.\n            partition: The partition to write the record to.\n\n        Returns:\n            The metadata of the written record.\n        \"\"\"\n        return self.partitions[partition].write(value)\n\n    def write_with_key(self, value: bytes, key: bytes) -> RecordMetadata:  # type: ignore\n        \"\"\"\n        Write a record with a specified key.\n\n        Args:\n            value: The value of the record.\n            key: The key of the record.\n\n        Returns:\n            The metadata of the written record.\n        \"\"\"\n        partition = int(hashlib.sha256(key).hexdigest(), 16) % self.num_partitions\n        return self.partitions[partition].write(value, key=key)\n\n    def write(  # type: ignore\n        self,\n        value: bytes,\n        *,\n        key: Optional[bytes] = None,\n        partition: Optional[int] = None,\n    ) -> RecordMetadata:\n        \"\"\"\n        Write a record to the topic.\n\n        Args:\n            value: The value of the record.\n            key: The key of the record (optional).\n         
   partition: The partition to write the record to (optional).\n\n        Returns:\n            The metadata of the written record.\n        \"\"\"\n        if partition is not None:\n            return self.write_with_partition(value, partition)\n\n        if key is not None:\n            return self.write_with_key(value, key)\n\n        partition = random.randint(0, self.num_partitions - 1)  # nosec\n        return self.write_with_partition(value, partition)\n\n    def latest_offset(self, partition: int) -> int:\n        \"\"\"\n        Get the latest offset of a partition.\n\n        Args:\n            partition: The partition index.\n\n        Returns:\n            The latest offset of the partition.\n        \"\"\"\n        return self.partitions[partition].latest_offset()\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 17\ndef split_list(list_to_split: List[Any], split_size: int) -> List[List[Any]]:\n    \"\"\"\n    Split a list into smaller lists of a specified size.\n\n    Args:\n        list_to_split: The list to split.\n        split_size: The size of each split.\n\n    Returns:\n        A list of smaller lists.\n    \"\"\"\n    return [\n        list_to_split[start_index : start_index + split_size]\n        for start_index in range(0, len(list_to_split), split_size)\n    ]\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 19\nclass GroupMetadata:\n    def __init__(self, num_partitions: int):\n        \"\"\"\n        Initialize a GroupMetadata object.\n\n        Args:\n            num_partitions: The number of partitions in the group.\n        \"\"\"\n        self.num_partitions = num_partitions\n        self.partitions_offsets: Dict[int, int] = {}\n        self.consumer_ids: List[uuid.UUID] = list()\n        self.partition_assignments: Dict[uuid.UUID, List[int]] = {}\n\n    def subscribe(self, consumer_id: uuid.UUID) -> None:\n        \"\"\"\n        Subscribe a consumer to the group.\n\n        Args:\n            consumer_id: The ID of the consumer.\n        
\"\"\"\n        self.consumer_ids.append(consumer_id)\n        self.rebalance()\n\n    def unsubscribe(self, consumer_id: uuid.UUID) -> None:\n        \"\"\"\n        Unsubscribe a consumer from the group.\n\n        Args:\n            consumer_id: The ID of the consumer.\n        \"\"\"\n        self.consumer_ids.remove(consumer_id)\n        self.rebalance()\n\n    def rebalance(self) -> None:\n        \"\"\"\n        Rebalance the group's partition assignments.\n        \"\"\"\n        if len(self.consumer_ids) == 0:\n            self.partition_assignments = {}\n        else:\n            partitions_per_actor = self.num_partitions // len(self.consumer_ids)\n            if self.num_partitions % len(self.consumer_ids) != 0:\n                partitions_per_actor += 1\n            self.assign_partitions(partitions_per_actor)\n\n    def assign_partitions(self, partitions_per_actor: int) -> None:\n        partitions = [i for i in range(self.num_partitions)]\n\n        partitions_split = split_list(partitions, partitions_per_actor)\n        self.partition_assignments = {\n            self.consumer_ids[i]: partition_split\n            for i, partition_split in enumerate(partitions_split)\n        }\n\n    def get_partitions(\n        self, consumer_id: uuid.UUID\n    ) -> Tuple[List[int], Dict[int, Optional[int]]]:\n        \"\"\"\n        Get the partition assignments and offsets for a consumer.\n\n        Args:\n            consumer_id: The ID of the consumer.\n\n        Returns:\n            A tuple containing the partition assignments and offsets.\n        \"\"\"\n        partition_assignments = self.partition_assignments.get(consumer_id, [])\n        partition_offsets_assignments = {\n            partition: self.partitions_offsets.get(partition, None)\n            for partition in partition_assignments\n        }\n        return partition_assignments, partition_offsets_assignments\n\n    def set_offset(self, partition: int, offset: int) -> None:\n        \"\"\"\n    
    Set the offset for a partition.\n\n        Args:\n            partition: The partition index.\n            offset: The offset to set.\n        \"\"\"\n        self.partitions_offsets[partition] = offset\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 22\n@classcontextmanager()\nclass InMemoryBroker:\n    def __init__(\n        self,\n        num_partitions: int = 1,\n    ):\n        self.num_partitions = num_partitions\n        self.topics: Dict[Tuple[str, str], KafkaTopic] = {}\n        self.topic_groups: Dict[Tuple[str, str, str], GroupMetadata] = {}\n        self.is_started: bool = False\n\n    def connect(self) -> uuid.UUID:\n        return uuid.uuid4()\n\n    def dissconnect(self, consumer_id: uuid.UUID) -> None:\n        \"\"\"\n        Disconnect a consumer from the broker.\n\n        Args:\n            consumer_id: The ID of the consumer.\n        \"\"\"\n        pass\n\n    def subscribe(\n        self, bootstrap_server: str, topic: str, group: str, consumer_id: uuid.UUID\n    ) -> None:\n        raise NotImplementedError()\n\n    def unsubscribe(\n        self, bootstrap_server: str, topic: str, group: str, consumer_id: uuid.UUID\n    ) -> None:\n        raise NotImplementedError()\n\n    def read(  # type: ignore\n        self,\n        *,\n        bootstrap_server: str,\n        topic: str,\n        group: str,\n        consumer_id: uuid.UUID,\n        auto_offset_reset: str,\n    ) -> Dict[TopicPartition, List[KafkaRecord]]:\n        raise NotImplementedError()\n\n    def write(  # type: ignore\n        self,\n        *,\n        bootstrap_server: str,\n        topic: str,\n        value: bytes,\n        key: Optional[bytes] = None,\n        partition: Optional[int] = None,\n    ) -> RecordMetadata:\n        raise NotImplementedError()\n\n    @contextmanager\n    def lifecycle(self) -> Iterator[\"InMemoryBroker\"]:\n        \"\"\"\n        Context manager for the lifecycle of the in-memory broker.\n\n        Yields:\n            An instance of the 
in-memory broker.\n        \"\"\"\n        raise NotImplementedError()\n\n    async def _start(self) -> str:\n        \"\"\"\n        Start the in-memory broker.\n\n        Returns:\n            The address of the broker.\n        \"\"\"\n        logger.info(\"InMemoryBroker._start() called\")\n        self.__enter__()  # type: ignore\n        return \"localbroker:0\"\n\n    async def _stop(self) -> None:\n        \"\"\"\n        Stop the in-memory broker.\n        \"\"\"\n        logger.info(\"InMemoryBroker._stop() called\")\n        self.__exit__(None, None, None)  # type: ignore\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 23\n@patch\ndef subscribe(\n    self: InMemoryBroker,\n    bootstrap_server: str,\n    topic: str,\n    group: str,\n    consumer_id: uuid.UUID,\n) -> None:\n    \"\"\"\n    Subscribe a consumer to a topic group.\n\n    Args:\n        bootstrap_server: The bootstrap server address.\n        topic: The topic to subscribe to.\n        group: The group to join.\n        consumer_id: The ID of the consumer.\n    \"\"\"\n    if (bootstrap_server, topic) not in self.topics:\n        self.topics[(bootstrap_server, topic)] = KafkaTopic(\n            topic=topic, num_partitions=self.num_partitions\n        )\n\n    group_meta = self.topic_groups.get(\n        (bootstrap_server, topic, group), GroupMetadata(self.num_partitions)\n    )\n    group_meta.subscribe(consumer_id)\n    self.topic_groups[(bootstrap_server, topic, group)] = group_meta\n\n\n@patch\ndef unsubscribe(\n    self: InMemoryBroker,\n    bootstrap_server: str,\n    topic: str,\n    group: str,\n    consumer_id: uuid.UUID,\n) -> None:\n    \"\"\"\n    Unsubscribe a consumer from a topic group.\n\n    Args:\n        bootstrap_server: The bootstrap server address.\n        topic: The topic to unsubscribe from.\n        group: The group to leave.\n        consumer_id: The ID of the consumer.\n    \"\"\"\n    self.topic_groups[(bootstrap_server, topic, group)].unsubscribe(consumer_id)\n\n# %% 
../../nbs/001_InMemoryBroker.ipynb 25\n@patch\ndef write(  # type: ignore\n    self: InMemoryBroker,\n    *,\n    bootstrap_server: str,\n    topic: str,\n    value: bytes,\n    key: Optional[bytes] = None,\n    partition: Optional[int] = None,\n) -> RecordMetadata:\n    \"\"\"\n    Write a message to a topic.\n\n    Args:\n        bootstrap_server: The bootstrap server address.\n        topic: The topic to write the message to.\n        value: The value of the message.\n        key: The key associated with the message.\n        partition: The partition ID to write the message to.\n\n    Returns:\n        The metadata of the written message.\n    \"\"\"\n    if (bootstrap_server, topic) not in self.topics:\n        self.topics[(bootstrap_server, topic)] = KafkaTopic(\n            topic=topic, num_partitions=self.num_partitions\n        )\n\n    return self.topics[(bootstrap_server, topic)].write(\n        value, key=key, partition=partition\n    )\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 27\n@patch\ndef read(  # type: ignore\n    self: InMemoryBroker,\n    *,\n    bootstrap_server: str,\n    topic: str,\n    group: str,\n    consumer_id: uuid.UUID,\n    auto_offset_reset: str,\n) -> Dict[TopicPartition, List[KafkaRecord]]:\n    \"\"\"\n    Read messages from a topic group.\n\n    Args:\n        bootstrap_server: The bootstrap server address.\n        topic: The topic to read messages from.\n        group: The group to read messages for.\n        consumer_id: The ID of the consumer.\n        auto_offset_reset: The strategy to use when the consumer does not have a valid offset for the group.\n\n    Returns:\n        A dictionary containing the messages retrieved from each topic partition.\n    \"\"\"\n    group_meta = self.topic_groups[(bootstrap_server, topic, group)]\n    partitions, offsets = group_meta.get_partitions(consumer_id)\n\n    if len(partitions) == 0:\n        return {}\n\n    partitions_data = {}\n\n    for partition in partitions:\n        offset = 
offsets[partition]\n\n        if offset is None:\n            offset = (\n                self.topics[(bootstrap_server, topic)].latest_offset(partition)\n                if auto_offset_reset == \"latest\"\n                else 0\n            )\n\n        topic_partition, data, offset = self.topics[(bootstrap_server, topic)].read(\n            partition, offset\n        )\n\n        partitions_data[topic_partition] = data\n        group_meta.set_offset(partition, offset)\n\n    return partitions_data\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 34\n# InMemoryConsumer\nclass InMemoryConsumer:\n    def __init__(\n        self,\n        broker: InMemoryBroker,\n    ) -> None:\n        self.broker = broker\n        self._id: Optional[uuid.UUID] = None\n        self._auto_offset_reset: str = \"latest\"\n        self._group_id: Optional[str] = None\n        self._topics: List[str] = list()\n        self._bootstrap_servers = \"\"\n\n    @delegates(AIOKafkaConsumer)\n    def __call__(self, **kwargs: Any) -> \"InMemoryConsumer\":\n        defaults = _get_default_kwargs_from_sig(InMemoryConsumer.__call__, **kwargs)\n        consume_copy = InMemoryConsumer(self.broker)\n        consume_copy._auto_offset_reset = defaults[\"auto_offset_reset\"]\n        consume_copy._bootstrap_servers = (\n            \"\".join(defaults[\"bootstrap_servers\"])\n            if isinstance(defaults[\"bootstrap_servers\"], list)\n            else defaults[\"bootstrap_servers\"]\n        )\n\n        consume_copy._group_id = (\n            defaults[\"group_id\"]\n            if defaults[\"group_id\"] is not None\n            else \"\".join(random.choices(string.ascii_letters, k=10))  # nosec\n        )\n        return consume_copy\n\n    @delegates(AIOKafkaConsumer.start)\n    async def start(self, **kwargs: Any) -> None:\n        pass\n\n    @delegates(AIOKafkaConsumer.stop)\n    async def stop(self, **kwargs: Any) -> None:\n        pass\n\n    @delegates(AIOKafkaConsumer.subscribe)\n    def 
subscribe(self, topics: List[str], **kwargs: Any) -> None:\n        raise NotImplementedError()\n\n    @delegates(AIOKafkaConsumer.getmany)\n    async def getmany(  # type: ignore\n        self, **kwargs: Any\n    ) -> Dict[TopicPartition, List[ConsumerRecord]]:\n        raise NotImplementedError()\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 37\n@patch\n@delegates(AIOKafkaConsumer.start)\nasync def start(self: InMemoryConsumer, **kwargs: Any) -> None:\n    \"\"\"\n    Start consuming messages from the connected broker.\n\n    Raises:\n        RuntimeError: If start() has already been called without calling stop() first.\n    \"\"\"\n    logger.info(\"AIOKafkaConsumer patched start() called()\")\n    if self._id is not None:\n        raise RuntimeError(\n            \"Consumer start() already called! Run consumer stop() before running start() again\"\n        )\n    self._id = self.broker.connect()\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 40\n@patch  # type: ignore\n@delegates(AIOKafkaConsumer.subscribe)\ndef subscribe(self: InMemoryConsumer, topics: List[str], **kwargs: Any) -> None:\n    \"\"\"\n    Subscribe to a list of topics for consuming messages.\n\n    Args:\n        topics: A list of topics to subscribe to.\n\n    Raises:\n        RuntimeError: If start() has not been called before calling subscribe().\n    \"\"\"\n    logger.info(\"AIOKafkaConsumer patched subscribe() called\")\n    if self._id is None:\n        raise RuntimeError(\"Consumer start() not called! 
Run consumer start() first\")\n    logger.info(f\"AIOKafkaConsumer.subscribe(), subscribing to: {topics}\")\n    for topic in topics:\n        self.broker.subscribe(\n            bootstrap_server=self._bootstrap_servers,\n            consumer_id=self._id,\n            topic=topic,\n            group=self._group_id,  # type: ignore\n        )\n        self._topics.append(topic)\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 43\n@patch\n@delegates(AIOKafkaConsumer.stop)\nasync def stop(self: InMemoryConsumer, **kwargs: Any) -> None:\n    \"\"\"\n    Stop consuming messages from the connected broker.\n\n    Raises:\n        RuntimeError: If start() has not been called before calling stop().\n    \"\"\"\n    logger.info(\"AIOKafkaConsumer patched stop() called\")\n    if self._id is None:\n        raise RuntimeError(\"Consumer start() not called! Run consumer start() first\")\n    for topic in self._topics:\n        self.broker.unsubscribe(\n            bootstrap_server=self._bootstrap_servers,\n            topic=topic,\n            group=self._group_id,  # type: ignore\n            consumer_id=self._id,\n        )\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 46\n@patch\n@delegates(AIOKafkaConsumer.getmany)\nasync def getmany(  # type: ignore\n    self: InMemoryConsumer, **kwargs: Any\n) -> Dict[TopicPartition, List[ConsumerRecord]]:\n    \"\"\"\n    Retrieve messages from the subscribed topics.\n\n    Returns:\n        A dictionary containing the retrieved messages from each topic partition.\n\n    Raises:\n        RuntimeError: If start() has not been called before calling getmany().\n    \"\"\"\n    await asyncio.sleep(0)\n    for topic in self._topics:\n        return self.broker.read(\n            bootstrap_server=self._bootstrap_servers,\n            topic=topic,\n            consumer_id=self._id,  # type: ignore\n            group=self._group_id,  # type: ignore\n            auto_offset_reset=self._auto_offset_reset,\n        )\n\n# %% 
../../nbs/001_InMemoryBroker.ipynb 49\nclass InMemoryProducer:\n    def __init__(self, broker: InMemoryBroker, **kwargs: Any) -> None:\n        self.broker = broker\n        self.id: Optional[uuid.UUID] = None\n        self._bootstrap_servers = \"\"\n\n    @delegates(AIOKafkaProducer)\n    def __call__(self, **kwargs: Any) -> \"InMemoryProducer\":\n        defaults = _get_default_kwargs_from_sig(InMemoryConsumer.__call__, **kwargs)\n        producer_copy = InMemoryProducer(self.broker)\n        producer_copy._bootstrap_servers = (\n            \"\".join(defaults[\"bootstrap_servers\"])\n            if isinstance(defaults[\"bootstrap_servers\"], list)\n            else defaults[\"bootstrap_servers\"]\n        )\n        return producer_copy\n\n    @delegates(AIOKafkaProducer.start)\n    async def start(self, **kwargs: Any) -> None:\n        raise NotImplementedError()\n\n    @delegates(AIOKafkaProducer.stop)\n    async def stop(self, **kwargs: Any) -> None:\n        raise NotImplementedError()\n\n    @delegates(AIOKafkaProducer.send)\n    async def send(  # type: ignore\n        self,\n        topic: str,\n        msg: bytes,\n        key: Optional[bytes] = None,\n        **kwargs: Any,\n    ):\n        raise NotImplementedError()\n\n    @delegates(AIOKafkaProducer.partitions_for)\n    async def partitions_for(self, topic: str) -> List[int]:\n        raise NotImplementedError()\n\n    @delegates(AIOKafkaProducer._partition)\n    def _partition(\n        self, topic: str, arg1: Any, arg2: Any, arg3: Any, key: bytes, arg4: Any\n    ) -> int:\n        raise NotImplementedError()\n\n    @delegates(AIOKafkaProducer.create_batch)\n    def create_batch(self) -> \"MockBatch\":\n        raise NotImplementedError()\n\n    @delegates(AIOKafkaProducer.send_batch)\n    async def send_batch(self, batch: \"MockBatch\", topic: str, partition: Any) -> None:\n        raise NotImplementedError()\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 52\n@patch  # type: 
ignore\n@delegates(AIOKafkaProducer.start)\nasync def start(self: InMemoryProducer, **kwargs: Any) -> None:\n    \"\"\"\n    Start the in-memory producer.\n\n    Raises:\n        RuntimeError: If start() has already been called without calling stop() first.\n    \"\"\"\n    logger.info(\"AIOKafkaProducer patched start() called()\")\n    if self.id is not None:\n        raise RuntimeError(\n            \"Producer start() already called! Run producer stop() before running start() again\"\n        )\n    self.id = self.broker.connect()\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 55\n@patch  # type: ignore\n@delegates(AIOKafkaProducer.stop)\nasync def stop(self: InMemoryProducer, **kwargs: Any) -> None:\n    \"\"\"\n    Stop the in-memory producer.\n\n    Raises:\n        RuntimeError: If start() has not been called before calling stop().\n    \"\"\"\n    logger.info(\"AIOKafkaProducer patched stop() called\")\n    if self.id is None:\n        raise RuntimeError(\"Producer start() not called! Run producer start() first\")\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 58\n@patch\n@delegates(AIOKafkaProducer.send)\nasync def send(  # type: ignore\n    self: InMemoryProducer,\n    topic: str,\n    msg: bytes,\n    key: Optional[bytes] = None,\n    partition: Optional[int] = None,\n    **kwargs: Any,\n):  # asyncio.Task[RecordMetadata]\n    \"\"\"\n    Send a message to the specified topic.\n\n    Args:\n        topic: The topic to send the message to.\n        msg: The message to send.\n        key: The key associated with the message (optional).\n        partition: The partition to send the message to (optional).\n        **kwargs: Additional arguments to be passed to AIOKafkaProducer.send().\n\n    Returns:\n        A task that resolves to the RecordMetadata of the sent message.\n\n    Raises:\n        RuntimeError: If start() has not been called before calling send().\n    \"\"\"\n    if self.id is None:\n        raise RuntimeError(\"Producer start() not called! 
Run producer start() first\")\n\n    record = self.broker.write(\n        bootstrap_server=self._bootstrap_servers,\n        topic=topic,\n        value=msg,\n        key=key,\n        partition=partition,\n    )\n\n    async def _f(record: ConsumerRecord = record) -> RecordMetadata:  # type: ignore\n        return record\n\n    return asyncio.create_task(_f())\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 60\n@patch\n@delegates(AIOKafkaProducer.partitions_for)\nasync def partitions_for(self: InMemoryProducer, topic: str) -> List[int]:\n    \"\"\"\n    Retrieve the list of partitions for the specified topic.\n\n    Args:\n        topic: The topic to get the partitions for.\n\n    Returns:\n        A list of partition IDs.\n    \"\"\"\n    return [i for i in range(self.broker.num_partitions)]\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 62\n@patch\n@delegates(AIOKafkaProducer._partition)\ndef _partition(\n    self: InMemoryProducer,\n    topic: str,\n    arg1: Any,\n    arg2: Any,\n    arg3: Any,\n    key: bytes,\n    arg4: Any,\n) -> int:\n    \"\"\"\n    Determine the partition to which the message should be sent.\n\n    Args:\n        topic: The topic to send the message to.\n        arg1, arg2, arg3, arg4: Additional arguments passed to the original AIOKafkaProducer._partition().\n\n    Returns:\n        The partition ID.\n    \"\"\"\n    return int(hashlib.sha256(key).hexdigest(), 16) % self.broker.num_partitions\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 64\nclass MockBatch:\n    def __init__(self) -> None:\n        \"\"\"\n        Initialize an instance of MockBatch.\n        \"\"\"\n        self._batch: List[Tuple] = list()\n\n    def append(  # type: ignore\n        self, key: Optional[bytes], value: bytes, timestamp: int\n    ) -> RecordMetadata:\n        \"\"\"\n        Append a message to the batch.\n\n        Args:\n            key: The key associated with the message (optional).\n            value: The value of the message.\n            timestamp: The 
timestamp of the message.\n\n        Returns:\n            The RecordMetadata of the appended message.\n        \"\"\"\n        self._batch.append((key, value))\n        return RecordMetadata(\n            topic=\"\",\n            partition=0,\n            topic_partition=None,\n            offset=0,\n            timestamp=timestamp,\n            timestamp_type=0,\n            log_start_offset=0,\n        )\n\n\n@patch\n@delegates(AIOKafkaProducer.create_batch)\ndef create_batch(self: InMemoryProducer) -> \"MockBatch\":\n    \"\"\"\n    Create a mock batch for the in-memory producer.\n\n    Returns:\n        A MockBatch instance.\n    \"\"\"\n    return MockBatch()\n\n\n@patch\n@delegates(AIOKafkaProducer.send_batch)\nasync def send_batch(\n    self: InMemoryProducer, batch: \"MockBatch\", topic: str, partition: Any\n) -> None:\n    \"\"\"\n    Send a batch of messages to the specified topic and partition.\n\n    Args:\n        batch: The MockBatch containing the messages to send.\n        topic: The topic to send the batch of messages to.\n        partition: The partition to send the batch of messages to.\n    \"\"\"\n    for record in batch._batch:\n        self.broker.write(\n            bootstrap_server=self._bootstrap_servers,\n            topic=topic,\n            value=record[1],\n            key=record[0],\n            partition=partition,\n        )\n\n# %% ../../nbs/001_InMemoryBroker.ipynb 68\n@patch\n@contextmanager\ndef lifecycle(self: InMemoryBroker) -> Iterator[InMemoryBroker]:\n    \"\"\"\n    Context manager for the lifecycle of the in-memory broker.\n\n    Yields:\n        An instance of the in-memory broker.\n    \"\"\"\n    logger.info(\n        \"InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\"\n    )\n    try:\n        logger.info(\"InMemoryBroker starting\")\n\n        old_consumer = fastkafka._aiokafka_imports.AIOKafkaConsumer\n        old_producer = fastkafka._aiokafka_imports.AIOKafkaProducer\n\n        
fastkafka._aiokafka_imports.AIOKafkaConsumer = InMemoryConsumer(self)\n        fastkafka._aiokafka_imports.AIOKafkaProducer = InMemoryProducer(self)\n\n        self.is_started = True\n        yield self\n    finally:\n        logger.info(\"InMemoryBroker stopping\")\n\n        fastkafka._aiokafka_imports.AIOKafkaConsumer = old_consumer\n        fastkafka._aiokafka_imports.AIOKafkaProducer = old_producer\n\n        self.is_started = False\n"
  },
  {
    "path": "fastkafka/_testing/local_redpanda_broker.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/003_LocalRedpandaBroker.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'get_redpanda_docker_cmd', 'LocalRedpandaBroker', 'check_docker', 'start_redpanda_brokers']\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 1\nimport asyncio\nfrom pathlib import Path\nfrom contextlib import asynccontextmanager\nfrom tempfile import TemporaryDirectory\nfrom typing import *\n\nimport asyncer\nimport nest_asyncio\n\nfrom .._components._subprocess import terminate_asyncio_process\nfrom .._components.helpers import in_notebook\nfrom .._components.logger import get_logger\nfrom .._components.meta import delegates, export, patch\nfrom fastkafka._testing.apache_kafka_broker import (\n    get_free_port,\n    run_and_match,\n    _get_unique_local_brokers_to_start,\n    _start_and_stop_brokers,\n)\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 3\nif in_notebook():\n    from tqdm.notebook import tqdm\nelse:\n    from tqdm import tqdm\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 6\ndef get_redpanda_docker_cmd(\n    listener_port: int = 9092,\n    tag: str = \"v23.1.2\",\n    seastar_core: int = 1,\n    memory: str = \"1G\",\n    mode: str = \"dev-container\",\n    default_log_level: str = \"debug\",\n) -> List[str]:\n    \"\"\"\n    Generates a Docker CLI command to start redpanda container\n\n    Args:\n        listener_port: Port on which the clients (producers and consumers) can connect\n        tag: Tag of Redpanda image to use to start container\n        seastar_core: Core(s) to use byt Seastar (the framework Redpanda uses under the hood)\n        memory: The amount of memory to make available to Redpanda\n        mode: Mode to use to load configuration properties in container\n        default_log_level: Log levels to use for Redpanda\n    \"\"\"\n    redpanda_docker_cmd = [\n        \"docker\",\n        \"run\",\n        \"--rm\",\n    
    \"--name\",\n        f\"redpanda_{listener_port}\",\n        \"-p\",\n        f\"{listener_port}:{listener_port}\",\n        f\"docker.redpanda.com/redpandadata/redpanda:{tag}\",\n        \"redpanda\",\n        \"start\",\n        \"--kafka-addr\",\n        f\"internal://0.0.0.0:9090,external://0.0.0.0:{listener_port}\",\n        \"--advertise-kafka-addr\",\n        f\"internal://localhost:9090,external://localhost:{listener_port}\",\n        \"--smp\",\n        str(seastar_core),\n        \"--memory\",\n        memory,\n        \"--mode\",\n        mode,\n        \"--default-log-level\",\n        default_log_level,\n    ]\n    return redpanda_docker_cmd\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 8\n@export(\"fastkafka.testing\")\nclass LocalRedpandaBroker:\n    \"\"\"LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.\"\"\"\n\n    @delegates(get_redpanda_docker_cmd, keep=True)\n    def __init__(\n        self,\n        topics: Iterable[str] = [],\n        *,\n        retries: int = 3,\n        apply_nest_asyncio: bool = False,\n        **kwargs: Dict[str, Any],\n    ):\n        \"\"\"Initialises the LocalRedpandaBroker object\n\n        Args:\n            topics: List of topics to create after sucessfull redpanda broker startup\n            retries: Number of retries to create redpanda service\n            apply_nest_asyncio: set to True if running in notebook\n            listener_port: Port on which the clients (producers and consumers) can connect\n            tag: Tag of Redpanda image to use to start container\n            seastar_core: Core(s) to use byt Seastar (the framework Redpanda uses under the hood)\n            memory: The amount of memory to make available to Redpanda\n            mode: Mode to use to load configuration properties in container\n            default_log_level: Log levels to use for Redpanda\n        \"\"\"\n        self.redpanda_kwargs = kwargs\n\n        if 
\"listener_port\" not in self.redpanda_kwargs:\n            self.redpanda_kwargs[\"listener_port\"] = 9092  # type: ignore\n\n        self.retries = retries\n        self.apply_nest_asyncio = apply_nest_asyncio\n        self.temporary_directory: Optional[TemporaryDirectory] = None\n        self.temporary_directory_path: Optional[Path] = None\n        self.redpanda_task: Optional[asyncio.subprocess.Process] = None\n        self._is_started = False\n        self.topics: Iterable[str] = topics\n\n    @property\n    def is_started(self) -> bool:\n        \"\"\"Property indicating whether the LocalRedpandaBroker object is started.\n\n        The is_started property indicates if the LocalRedpandaBroker object is currently\n        in a started state. This implies that Redpanda docker container has sucesfully\n        started and is ready for handling events.\n\n        Returns:\n            bool: True if the object is started, False otherwise.\n        \"\"\"\n        return self._is_started\n\n    @classmethod\n    async def _check_deps(cls) -> None:\n        \"\"\"Prepares the environment for running redpanda brokers.\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    async def _start(self) -> str:\n        \"\"\"Starts a local redpanda broker instance asynchronously\n        Returns:\n           Redpanda broker bootstrap server address in string format: add:port\n        \"\"\"\n        raise NotImplementedError\n\n    def start(self) -> str:\n        \"\"\"Starts a local redpanda broker instance synchronously\n        Returns:\n           Redpanda broker bootstrap server address in string format: add:port\n        \"\"\"\n        raise NotImplementedError\n\n    def stop(self) -> None:\n        \"\"\"Stops a local redpanda broker instance synchronously\"\"\"\n        raise NotImplementedError\n\n    async def _stop(self) -> None:\n        \"\"\"Stops a local redpanda broker instance synchronously\n        Returns:\n           
None\n        \"\"\"\n        raise NotImplementedError\n\n    def get_service_config_string(self, service: str, *, data_dir: Path) -> str:\n        \"\"\"Generates a configuration for a service\n        Args:\n            data_dir: Path to the directory where the zookeepeer instance will save data\n            service: \"redpanda\", defines which service to get config string for\n        \"\"\"\n        raise NotImplementedError\n\n    async def _start_redpanda(self) -> None:\n        \"\"\"Start a local redpanda broker\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    async def _create_topics(self) -> None:\n        \"\"\"Create missing topics in local redpanda broker\n        Returns:\n           None\n        \"\"\"\n        raise NotImplementedError\n\n    def __enter__(self) -> str:\n        return self.start()\n\n    def __exit__(self, *args: Any, **kwargs: Any) -> None:\n        self.stop()\n\n    async def __aenter__(self) -> str:\n        return await self._start()\n\n    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\n        await self._stop()\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 10\nasync def check_docker(tag: str = \"v23.1.2\") -> bool:\n    \"\"\"\n    Checks if a Docker image with the specified tag is available.\n\n    Args:\n        tag: The tag of the Docker image to check. 
Defaults to \"v23.1.2\".\n\n    Returns:\n        bool: True if the Docker image is available; False otherwise.\n    \"\"\"\n    try:\n        docker_task = await run_and_match(\n            \"docker\",\n            \"pull\",\n            f\"docker.redpanda.com/redpandadata/redpanda:{tag}\",\n            pattern=f\"docker.redpanda.com/redpandadata/redpanda:{tag}\",\n        )\n        return True\n    except Exception as e:\n        logger.debug(f\"Error in check_docker() : {e}\")\n        return False\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 12\n@patch(cls_method=True)  # type: ignore\nasync def _check_deps(cls: LocalRedpandaBroker) -> None:\n    if not await check_docker():\n        raise RuntimeError(\n            \"Docker installation not found! Please install docker manually and retry.\"\n        )\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 15\n@patch\nasync def _start_redpanda(self: LocalRedpandaBroker, service: str = \"redpanda\") -> None:\n    logger.info(f\"Starting {service}...\")\n\n    if self.temporary_directory_path is None:\n        raise ValueError(\n            \"LocalRedpandaBroker._start_redpanda(): self.temporary_directory_path is None, did you initialise it?\"\n        )\n\n    configs_tried: List[Dict[str, Any]] = []\n\n    for i in range(self.retries + 1):\n        configs_tried = configs_tried + [getattr(self, f\"{service}_kwargs\").copy()]\n\n        redpanda_docker_cmd = get_redpanda_docker_cmd(**self.redpanda_kwargs)  # type: ignore\n\n        try:\n            service_task = await run_and_match(\n                *redpanda_docker_cmd,\n                capture=\"stderr\",\n                pattern=\"Bootstrap complete\",\n                timeout=30,\n            )\n        except Exception as e:\n            logger.info(\n                f\"{service} startup failed, generating a new port and retrying...\"\n            )\n            port = get_free_port()\n            self.redpanda_kwargs[\"listener_port\"] = port  # type: 
ignore\n\n            logger.info(f\"port={port}\")\n        else:\n            setattr(self, f\"{service}_task\", service_task)\n            return\n\n    raise ValueError(f\"Could not start {service} with params: {configs_tried}\")\n\n\n@patch\nasync def _create_topics(self: LocalRedpandaBroker) -> None:\n    listener_port = self.redpanda_kwargs.get(\"listener_port\", 9092)\n\n    async with asyncer.create_task_group() as tg:\n        processes = [\n            tg.soonify(run_and_match)(\n                \"docker\",\n                \"exec\",\n                f\"redpanda_{listener_port}\",\n                \"rpk\",\n                \"topic\",\n                \"create\",\n                topic,\n                pattern=topic,\n                timeout=10,\n            )\n            for topic in self.topics\n        ]\n\n    try:\n        return_values = [\n            await asyncio.wait_for(process.value.wait(), 30) for process in processes\n        ]\n        if any(return_value != 0 for return_value in return_values):\n            raise ValueError(\"Could not create missing topics!\")\n    except asyncio.TimeoutError as _:\n        raise ValueError(\"Timed out while creating missing topics!\")\n\n\n@patch\nasync def _start(self: LocalRedpandaBroker) -> str:\n    await self._check_deps()\n\n    self.temporary_directory = TemporaryDirectory()\n    self.temporary_directory_path = Path(self.temporary_directory.__enter__())\n\n    await self._start_redpanda()\n    await asyncio.sleep(5)\n\n    listener_port = self.redpanda_kwargs.get(\"listener_port\", 9092)\n    bootstrap_server = f\"127.0.0.1:{listener_port}\"\n    logger.info(f\"Local Redpanda broker up and running on {bootstrap_server}\")\n\n    await self._create_topics()\n\n    self._is_started = True\n\n    return bootstrap_server\n\n\n@patch\nasync def _stop(self: LocalRedpandaBroker) -> None:\n    logger.info(f\"Stopping redpanda...\")\n    await terminate_asyncio_process(self.redpanda_task)  # type: 
ignore\n    logger.info(f\"Redpanda stopped.\")\n    self.temporary_directory.__exit__(None, None, None)  # type: ignore\n    self._is_started = False\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 17\n@patch\ndef start(self: LocalRedpandaBroker) -> str:\n    \"\"\"Starts a local redpanda broker instance synchronously\n    Returns:\n       Redpanda broker bootstrap server address in string format: add:port\n    \"\"\"\n    logger.info(f\"{self.__class__.__name__}.start(): entering...\")\n    try:\n        # get or create loop\n        try:\n            loop = asyncio.get_event_loop()\n        except RuntimeError as e:\n            logger.warning(\n                f\"{self.__class__.__name__}.start(): RuntimeError raised when calling asyncio.get_event_loop(): {e}\"\n            )\n            logger.warning(\n                f\"{self.__class__.__name__}.start(): asyncio.new_event_loop()\"\n            )\n            loop = asyncio.new_event_loop()\n\n        # start redpanda broker in the loop\n\n        if loop.is_running():\n            if self.apply_nest_asyncio:\n                logger.warning(\n                    f\"{self.__class__.__name__}.start(): ({loop}) is already running!\"\n                )\n                logger.warning(\n                    f\"{self.__class__.__name__}.start(): calling nest_asyncio.apply()\"\n                )\n                nest_asyncio.apply(loop)\n            else:\n                msg = f\"{self.__class__.__name__}.start(): ({loop}) is already running! 
Use 'apply_nest_asyncio=True' when creating 'LocalRedpandaBroker' to prevent this.\"\n                logger.error(msg)\n                raise RuntimeError(msg)\n\n        retval = loop.run_until_complete(self._start())\n        logger.info(f\"{self.__class__}.start(): returning {retval}\")\n        return retval\n    finally:\n        logger.info(f\"{self.__class__.__name__}.start(): exited.\")\n\n\n@patch\ndef stop(self: LocalRedpandaBroker) -> None:\n    \"\"\"Stops a local redpanda broker instance synchronously\"\"\"\n    logger.info(f\"{self.__class__.__name__}.stop(): entering...\")\n    try:\n        if not self._is_started:\n            raise RuntimeError(\n                \"LocalRedpandaBroker not started yet, please call LocalRedpandaBroker.start() before!\"\n            )\n\n        loop = asyncio.get_event_loop()\n        loop.run_until_complete(self._stop())\n    finally:\n        logger.info(f\"{self.__class__.__name__}.stop(): exited.\")\n\n# %% ../../nbs/003_LocalRedpandaBroker.ipynb 21\n@asynccontextmanager\nasync def start_redpanda_brokers(\n    kafka_brokers_name: str,\n    kafka_brokers: List[Dict[str, Dict[str, Any]]],\n    duplicate_ok: bool = False,\n    ignore_nonlocal_brokers: bool = False,\n) -> AsyncIterator[None]:\n    unique_local_brokers_to_start = await _get_unique_local_brokers_to_start(\n        kafka_brokers_name=kafka_brokers_name,\n        kafka_brokers=kafka_brokers,\n        duplicate_ok=duplicate_ok,\n        ignore_nonlocal_brokers=ignore_nonlocal_brokers,\n    )\n\n    brokers = [\n        LocalRedpandaBroker(listener_port=broker[1])  # type: ignore\n        for broker in unique_local_brokers_to_start\n    ]\n\n    async with _start_and_stop_brokers(brokers=brokers):\n        yield\n"
  },
  {
    "path": "fastkafka/_testing/test_utils.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/004_Test_Utils.ipynb.\n\n# %% auto 0\n__all__ = ['logger', 'nb_safe_seed', 'mock_AIOKafkaProducer_send', 'run_script_and_cancel', 'display_docs']\n\n# %% ../../nbs/004_Test_Utils.ipynb 1\nimport asyncio\nimport hashlib\nimport platform\nimport shlex\nimport signal\nimport subprocess  # nosec\nimport unittest\nimport unittest.mock\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom tempfile import TemporaryDirectory\nfrom typing import *\n\nimport asyncer\nfrom IPython.display import IFrame\n\nfrom .._application.app import FastKafka\nfrom .._components._subprocess import terminate_asyncio_process\nfrom .._components.helpers import _import_from_string, change_dir\nfrom .._components.logger import get_logger\n\n# %% ../../nbs/004_Test_Utils.ipynb 4\nlogger = get_logger(__name__)\n\n# %% ../../nbs/004_Test_Utils.ipynb 6\ndef nb_safe_seed(s: str) -> Callable[[int], int]:\n    \"\"\"Gets a unique seed function for a notebook\n\n    Params:\n        s: name of the notebook used to initialize the seed function\n\n    Returns:\n        A unique seed function\n    \"\"\"\n    init_seed = int(hashlib.sha256(s.encode(\"utf-8\")).hexdigest(), 16) % (10**8)\n\n    def _get_seed(x: int = 0, *, init_seed: int = init_seed) -> int:\n        return init_seed + x\n\n    return _get_seed\n\n# %% ../../nbs/004_Test_Utils.ipynb 8\n@contextmanager\ndef mock_AIOKafkaProducer_send() -> Generator[unittest.mock.Mock, None, None]:\n    \"\"\"Mocks **send** method of **AIOKafkaProducer**\"\"\"\n    with unittest.mock.patch(\"__main__.AIOKafkaProducer.send\") as mock:\n\n        async def _f() -> None:\n            pass\n\n        mock.return_value = asyncio.create_task(_f())\n\n        yield mock\n\n# %% ../../nbs/004_Test_Utils.ipynb 9\nasync def run_script_and_cancel(\n    script: str,\n    *,\n    script_file: Optional[str] = None,\n    cmd: Optional[str] = None,\n    cancel_after: int = 10,\n    
app_name: str = \"app\",\n    kafka_app_name: str = \"kafka_app\",\n    generate_docs: bool = False,\n) -> Tuple[int, bytes]:\n    \"\"\"\n    Runs a script and cancels it after a predefined time.\n\n    Args:\n        script: A python source code to be executed in a separate subprocess.\n        script_file: Name of the script where script source will be saved.\n        cmd: Command to execute. If None, it will be set to 'python3 -m {Path(script_file).stem}'.\n        cancel_after: Number of seconds before sending SIGTERM signal.\n        app_name: Name of the app.\n        kafka_app_name: Name of the Kafka app.\n        generate_docs: Flag indicating whether to generate docs.\n\n    Returns:\n        A tuple containing the exit code and combined stdout and stderr as a binary string.\n    \"\"\"\n    if script_file is None:\n        script_file = \"script.py\"\n\n    if cmd is None:\n        cmd = f\"python3 -m {Path(script_file).stem}\"\n\n    with TemporaryDirectory() as d:\n        consumer_script = Path(d) / script_file\n\n        with open(consumer_script, \"w\") as file:\n            file.write(script)\n\n        if generate_docs:\n            logger.info(\n                f\"Generating docs for: {Path(script_file).stem}:{kafka_app_name}\"\n            )\n            try:\n                kafka_app: FastKafka = _import_from_string(\n                    f\"{Path(script_file).stem}:{kafka_app_name}\"\n                )\n                await asyncer.asyncify(kafka_app.create_docs)()\n            except Exception as e:\n                logger.warning(\n                    f\"Generating docs failed for: {Path(script_file).stem}:{kafka_app_name}, ignoring it for now.\"\n                )\n\n        creationflags = 0 if platform.system() != \"Windows\" else subprocess.CREATE_NEW_PROCESS_GROUP  # type: ignore\n        proc = subprocess.Popen(\n            shlex.split(cmd),\n            stdout=subprocess.PIPE,\n            stderr=subprocess.STDOUT,\n            
cwd=d,\n            shell=True  # nosec: [B602:subprocess_without_shell_equals_true] subprocess call - check for execution of untrusted input.\n            if platform.system() == \"Windows\"\n            else False,\n            creationflags=creationflags,\n        )\n        await asyncio.sleep(cancel_after)\n        if platform.system() == \"Windows\":\n            proc.send_signal(signal.CTRL_BREAK_EVENT)  # type: ignore\n        else:\n            proc.terminate()\n        output, _ = proc.communicate()\n\n        return (proc.returncode, output)\n\n# %% ../../nbs/004_Test_Utils.ipynb 14\nasync def display_docs(docs_path: str, port: int = 4000) -> None:\n    \"\"\"\n    Serves the documentation using an HTTP server.\n\n    Args:\n        docs_path: Path to the documentation.\n        port: Port number for the HTTP server. Defaults to 4000.\n\n    Returns:\n        None\n    \"\"\"\n    with change_dir(docs_path):\n        process = await asyncio.create_subprocess_exec(\n            \"python3\",\n            \"-m\",\n            \"http.server\",\n            f\"{port}\",\n            stdout=asyncio.subprocess.PIPE,\n            stderr=asyncio.subprocess.PIPE,\n        )\n        try:\n            from google.colab.output import eval_js\n\n            proxy = eval_js(f\"google.colab.kernel.proxyPort({port})\")\n            logger.info(\"Google colab detected! Proxy adjusted.\")\n        except:\n            proxy = f\"http://localhost:{port}\"\n        finally:\n            await asyncio.sleep(2)\n            display(IFrame(f\"{proxy}\", 1000, 700))  # type: ignore\n            await asyncio.sleep(2)\n            await terminate_asyncio_process(process)\n"
  },
  {
    "path": "fastkafka/encoder.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/020_Encoder_Export.ipynb.\n\n# %% auto 0\n__all__ = ['dummy']\n\n# %% ../nbs/020_Encoder_Export.ipynb 1\nfrom fastkafka._components.encoder.avro import (\n    AvroBase,\n    avro_decoder,\n    avro_encoder,\n    avsc_to_pydantic,\n)\nfrom ._components.encoder.json import json_decoder, json_encoder\nfrom ._components.meta import export\n\n__all__ = [\n    \"AvroBase\",\n    \"avro_decoder\",\n    \"avro_encoder\",\n    \"avsc_to_pydantic\",\n    \"json_decoder\",\n    \"json_encoder\",\n]\n\n# %% ../nbs/020_Encoder_Export.ipynb 3\n@export(\"_dummy\")\ndef dummy() -> None:\n    pass\n"
  },
  {
    "path": "fastkafka/executors.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/005_Application_executors_export.ipynb.\n\n# %% auto 0\n__all__ = ['dummy']\n\n# %% ../nbs/005_Application_executors_export.ipynb 1\nfrom ._components.meta import export\nfrom ._components.task_streaming import SequentialExecutor, DynamicTaskExecutor\n\n__all__ = [\"SequentialExecutor\", \"DynamicTaskExecutor\"]\n\n# %% ../nbs/005_Application_executors_export.ipynb 2\n@export(\"_dummy\")\ndef dummy() -> None:\n    pass\n"
  },
  {
    "path": "fastkafka/testing.py",
    "content": "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/000_Testing_export.ipynb.\n\n# %% auto 0\n__all__ = ['dummy']\n\n# %% ../nbs/000_Testing_export.ipynb 1\nfrom ._application.tester import Tester\nfrom ._components.meta import export\nfrom ._testing.apache_kafka_broker import ApacheKafkaBroker\nfrom ._testing.in_memory_broker import InMemoryBroker\nfrom ._testing.local_redpanda_broker import LocalRedpandaBroker\nfrom fastkafka._testing.test_utils import (\n    display_docs,\n    mock_AIOKafkaProducer_send,\n    nb_safe_seed,\n    run_script_and_cancel,\n)\n\n__all__ = [\n    \"InMemoryBroker\",\n    \"ApacheKafkaBroker\",\n    \"LocalRedpandaBroker\",\n    \"Tester\",\n    \"nb_safe_seed\",\n    \"true_after\",\n    \"mock_AIOKafkaProducer_send\",\n    \"run_script_and_cancel\",\n    \"display_docs\",\n]\n\n# %% ../nbs/000_Testing_export.ipynb 3\n@export(\"_dummy\")\ndef dummy() -> None:\n    pass\n"
  },
  {
    "path": "mkdocs/docs_overrides/css/extra.css",
    "content": ""
  },
  {
    "path": "mkdocs/docs_overrides/js/extra.js",
    "content": ""
  },
  {
    "path": "mkdocs/docs_overrides/js/math.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "mkdocs/docs_overrides/js/mathjax.js",
    "content": "window.MathJax = {\n  tex: {\n    inlineMath: [[\"\\\\(\", \"\\\\)\"]],\n    displayMath: [[\"\\\\[\", \"\\\\]\"]],\n    processEscapes: true,\n    processEnvironments: true\n  },\n  options: {\n    ignoreHtmlClass: \".*|\",\n    processHtmlClass: \"arithmatex\"\n  }\n};\n\ndocument$.subscribe(() => {\n  MathJax.typesetPromise()\n})\n"
  },
  {
    "path": "mkdocs/mkdocs.yml",
    "content": "# Site\nsite_name: FastKafka\nsite_url: https://fastkafka.airt.ai\nsite_author: airt\nsite_description: FastKafka is a powerful and easy-to-use Python library for building asynchronous web services that interact with Kafka topics. Built on top of FastAPI, Starlette, Pydantic, AIOKafka and AsyncAPI, FastKafka simplifies the process of writing producers and consumers for Kafka topics.\n# Repository\nrepo_name: fastkafka\nrepo_url: https://github.com/airtai/fastkafka\nedit_uri: ''\n\ncopyright: 2022 onwards, airt\n\ndocs_dir: docs\nsite_dir: site\n\nplugins:\n- literate-nav:\n    nav_file: SUMMARY.md\n- search\n- mkdocstrings:\n    handlers:\n      python:\n        import:\n        - https://docs.python.org/3/objects.inv\n        options:\n          heading_level: 4\n          show_category_heading: true\n          show_root_heading: true\n          show_root_toc_entry: true\n          show_signature_annotations: true\n          show_if_no_docstring: true\n\nmarkdown_extensions:\n- pymdownx.arithmatex:\n    generic: true\n- pymdownx.inlinehilite\n- pymdownx.details\n- pymdownx.emoji\n- pymdownx.magiclink\n- pymdownx.superfences\n- pymdownx.tasklist\n- pymdownx.highlight:\n    linenums: false\n- pymdownx.snippets:\n    check_paths: true\n- pymdownx.tabbed:\n    alternate_style: true\n- admonition\n- toc:\n    permalink: ¤\n#     - callouts\ntheme:\n  name: material\n  custom_dir: site_overrides\n  features:\n  - navigation.instant\n#     - navigation.tabs\n#     - navigation.tabs.sticky\n#     - navigation.sections\n#     - navigation.expand\n  - navigation.indexes\n  - navigation.top\n#     - toc.integrates\n  - search.suggest\n  - search.highlight\n  - search.share\n  - content.code.copy\n  - content.code.annotate\n  palette:\n  - scheme: slate\n    primary: black\n    accent: yellow\n    toggle:\n      icon: material/toggle-switch\n      name: Switch to light mode\n  - scheme: default\n    primary: black   # deep orange\n    accent: red\n    
toggle:\n      icon: material/toggle-switch-off-outline\n      name: Switch to dark mode\n  icon:\n    repo: fontawesome/brands/github\n#     repo: fontawesome/brands/gitlab\n    logo: material/compass-outline\n#     admonition:\n#         <type>: <icon>\n  favicon: overrides/images/compass-outline.png\n\nextra_css:\n- overrides/css/extra.css\n\nextra_javascript:\n- overrides/js/extra.js\n- overrides/js/mathjax.js\n- https://polyfill.io/v3/polyfill.min.js?features=es6\n- https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js\nextra:\n  version:\n    provider: mike\n  analytics:\n    provider: google\n    property: G-WLMWPELHMB\n  social_image: https://opengraph.githubassets.com/1671805243.560327/airtai/fastkafka\n"
  },
  {
    "path": "mkdocs/overrides/main.html",
    "content": "{% extends \"base.html\" %}\n\n{% block extrahead %}\n  {% set title = config.site_name %}\n  {% if page and page.meta and page.meta.title %}\n    {% set title = title ~ \" - \" ~ page.meta.title %}\n  {% elif page and page.title and not page.is_homepage %}\n    {% set title = title ~ \" - \" ~ page.title | striptags %}\n  {% endif %}\n  <meta property=\"og:type\" content=\"website\" />\n  <meta property=\"og:title\" content=\"{{ title }}\" />\n  <meta property=\"og:description\" content=\"{{ config.site_description }}\" />\n  <meta property=\"og:url\" content=\"{{ page.canonical_url }}\" />\n  <meta property=\"og:image\" content=\"images/machine.png\" />\n  <meta property=\"og:image:type\" content=\"image/png\" />\n  <meta property=\"og:image:width\" content=\"678\" />\n  <meta property=\"og:image:height\" content=\"533\" />\n{% endblock %}\n"
  },
  {
    "path": "mkdocs/site_overrides/main.html",
    "content": "{% extends \"base.html\" %}\n\n{% block extrahead %}\n  {% set title = config.site_name %}\n  {% if page and page.meta and page.meta.title %}\n    {% set title = title ~ \" - \" ~ page.meta.title %}\n  {% elif page and page.title and not page.is_homepage %}\n    {% set title = title ~ \" - \" ~ page.title | striptags %}\n  {% endif %}\n  {% set image_url = config.extra.social_image %}\n  <meta property=\"og:type\" content=\"website\" />\n  <meta property=\"og:title\" content=\"{{ title }}\" />\n  <meta property=\"og:description\" content=\"{{ config.site_description }}\" />\n  <meta property=\"og:url\" content=\"{{ page.canonical_url }}\" />\n  <meta property=\"og:image\" content=\"{{ image_url }}\" />\n  <meta property=\"og:image:type\" content=\"image/png\" />\n  <meta property=\"og:image:width\" content=\"1200\" />\n  <meta property=\"og:image:height\" content=\"630\" />\n\n  <meta name=\"twitter:card\" content=\"summary_large_image\" />\n  <meta name=\"twitter:title\" content=\"{{ title }}\" />\n  <meta name=\"twitter:description\" content=\"{{ config.site_description }}\" />\n  <meta name=\"twitter:image\" content=\"{{ image_url }}\" />\n{% endblock %}\n\n{% block outdated %}\n  You're not viewing the latest version.\n  <!-- nosemgrep -->\n  <a href=\"{{ '../' ~ base_url }}\">\n    <strong>Click here to go to latest.</strong>\n  </a>\n{% endblock %}\n"
  },
  {
    "path": "mkdocs/site_overrides/partials/copyright.html",
    "content": "<div class=\"md-copyright\">\n  {% if config.copyright %}\n    <div class=\"md-copyright__highlight\">\n      {{ config.copyright }}\n    </div>\n  {% endif %}\n  {% if not config.extra.generator == false %}\n    Made with\n    <a href=\"https://squidfunk.github.io/mkdocs-material/\" target=\"_blank\" rel=\"noopener\">\n      Material for MkDocs\n    </a>\n    and\n    <a href=\"https://nbdev-mkdocs.airt.ai/\" target=\"_blank\" rel=\"noopener\">\n      Material for nbdev\n    </a>\n  {% endif %}\n</div>\n"
  },
  {
    "path": "mkdocs/summary_template.txt",
    "content": "{sidebar}\n- API\n{api}\n- CLI\n{cli}\n- [Releases]{changelog}\n"
  },
  {
    "path": "mypy.ini",
    "content": "# Global options:\n\n[mypy]\nignore_missing_imports = True\ninstall_types = True\nnon_interactive = True\n\n# from https://blog.wolt.com/engineering/2021/09/30/professional-grade-mypy-configuration/\ndisallow_untyped_defs = True\nno_implicit_optional = True\ncheck_untyped_defs = True\nwarn_return_any = True\nshow_error_codes = True \nwarn_unused_ignores = True\n\ndisallow_incomplete_defs = True\ndisallow_untyped_decorators = True\ndisallow_any_unimported = True\n"
  },
  {
    "path": "nbs/.gitignore",
    "content": "application.py\n\n"
  },
  {
    "path": "nbs/000_AIOKafkaImports.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c55da358\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _aiokafka_imports\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a049b3f5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"from aiokafka import AIOKafkaConsumer, AIOKafkaProducer\\n\",\n    \"\\n\",\n    \"from fastkafka._components.meta import export\\n\",\n    \"\\n\",\n    \"__all__ = [\\n\",\n    \"    \\\"AIOKafkaConsumer\\\", \\\"AIOKafkaProducer\\\",\\n\",\n    \"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f2af47ba\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"_dummy\\\")\\n\",\n    \"def dummy() -> None:\\n\",\n    \"    pass\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/000_Testing_export.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8abf46a1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp testing\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ec481239\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"from fastkafka._application.tester import Tester\\n\",\n    \"from fastkafka._components.meta import export\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import ApacheKafkaBroker\\n\",\n    \"from fastkafka._testing.in_memory_broker import InMemoryBroker\\n\",\n    \"from fastkafka._testing.local_redpanda_broker import LocalRedpandaBroker\\n\",\n    \"from fastkafka._testing.test_utils import (\\n\",\n    \"    display_docs,\\n\",\n    \"    mock_AIOKafkaProducer_send,\\n\",\n    \"    nb_safe_seed,\\n\",\n    \"    run_script_and_cancel,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"__all__ = [\\n\",\n    \"    \\\"InMemoryBroker\\\",\\n\",\n    \"    \\\"ApacheKafkaBroker\\\",\\n\",\n    \"    \\\"LocalRedpandaBroker\\\",\\n\",\n    \"    \\\"Tester\\\",\\n\",\n    \"    \\\"nb_safe_seed\\\",\\n\",\n    \"    \\\"true_after\\\",\\n\",\n    \"    \\\"mock_AIOKafkaProducer_send\\\",\\n\",\n    \"    \\\"run_script_and_cancel\\\",\\n\",\n    \"    \\\"display_docs\\\",\\n\",\n    \"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0ce51c90\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert ApacheKafkaBroker.__module__ == \\\"fastkafka.testing\\\", ApacheKafkaBroker.__module__\\n\",\n    \"assert (\\n\",\n    \"    LocalRedpandaBroker.__module__ == \\\"fastkafka.testing\\\"\\n\",\n    \"), LocalRedpandaBroker.__module__\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"70fd14eb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   
\"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"_dummy\\\")\\n\",\n    \"def dummy() -> None:\\n\",\n    \"    pass\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/001_InMemoryBroker.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4e3e7ac1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _testing.in_memory_broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"78dd1499\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import hashlib\\n\",\n    \"import random\\n\",\n    \"import string\\n\",\n    \"import uuid\\n\",\n    \"from contextlib import contextmanager\\n\",\n    \"from dataclasses import dataclass, field\\n\",\n    \"from typing import *\\n\",\n    \"from types import ModuleType\\n\",\n    \"\\n\",\n    \"from aiokafka.structs import ConsumerRecord, RecordMetadata, TopicPartition\\n\",\n    \"\\n\",\n    \"import fastkafka._components.aiokafka_consumer_loop\\n\",\n    \"import fastkafka._aiokafka_imports\\n\",\n    \"from fastkafka._aiokafka_imports import AIOKafkaConsumer, AIOKafkaProducer\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import (\\n\",\n    \"    _get_default_kwargs_from_sig,\\n\",\n    \"    classcontextmanager,\\n\",\n    \"    delegates,\\n\",\n    \"    patch,\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"df56c33b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import random\\n\",\n    \"import unittest\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"import pytest\\n\",\n    \"\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0e65cf05\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  
{\n   \"cell_type\": \"markdown\",\n   \"id\": \"74e8c6f7\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Local Kafka broker\\n\",\n    \"> In-memory mockup of Kafka broker protocol\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"96928f97\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Kafka partition\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a64d6bc2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@dataclass\\n\",\n    \"class KafkaRecord:\\n\",\n    \"    topic: str = \\\"\\\"\\n\",\n    \"    partition: int = 0\\n\",\n    \"    key: Optional[bytes] = None\\n\",\n    \"    value: bytes = b\\\"\\\"\\n\",\n    \"    offset: int = 0\\n\",\n    \"    timestamp = 0\\n\",\n    \"    timestamp_type = 0\\n\",\n    \"    checksum = 0\\n\",\n    \"    serialized_key_size = 0\\n\",\n    \"    serialized_value_size = 0\\n\",\n    \"    headers: Sequence[Tuple[str, bytes]] = field(default_factory=list)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f1fae851\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class KafkaPartition:\\n\",\n    \"    def __init__(self, *, partition: int, topic: str):\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initialize a KafkaPartition object.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            partition: The partition number.\\n\",\n    \"            topic: The topic name.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.partition = partition\\n\",\n    \"        self.topic = topic\\n\",\n    \"        self.messages: List[KafkaRecord] = list()\\n\",\n    \"\\n\",\n    \"    def write(self, value: bytes, key: Optional[bytes] = None) -> RecordMetadata:  # type: ignore\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Write a Kafka record to 
the partition.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            value: The value of the record.\\n\",\n    \"            key: The key of the record.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The record metadata.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        record = KafkaRecord(\\n\",\n    \"            topic=self.topic,\\n\",\n    \"            partition=self.partition,\\n\",\n    \"            value=value,\\n\",\n    \"            key=key,\\n\",\n    \"            offset=len(self.messages),\\n\",\n    \"        )\\n\",\n    \"        record_meta = RecordMetadata(\\n\",\n    \"            topic=self.topic,\\n\",\n    \"            partition=self.partition,\\n\",\n    \"            topic_partition=TopicPartition(topic=self.topic, partition=self.partition),\\n\",\n    \"            offset=len(self.messages),\\n\",\n    \"            timestamp=1680602752070,\\n\",\n    \"            timestamp_type=0,\\n\",\n    \"            log_start_offset=0,\\n\",\n    \"        )\\n\",\n    \"        self.messages.append(record)\\n\",\n    \"        return record_meta\\n\",\n    \"\\n\",\n    \"    def read(self, offset: int) -> Tuple[List[KafkaRecord], int]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Read Kafka records from the partition starting from the given offset.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            offset: The starting offset.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            A tuple containing the list of records and the current offset.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return self.messages[offset:], len(self.messages)\\n\",\n    \"\\n\",\n    \"    def latest_offset(self) -> int:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Get the latest offset of the partition.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The latest offset.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return 
len(self.messages)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"99e5b162\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"partition_index = 0\\n\",\n    \"topic = \\\"test\\\"\\n\",\n    \"partition = KafkaPartition(partition=partition_index, topic=topic)\\n\",\n    \"\\n\",\n    \"msgs = [b\\\"some_msg\\\" for _ in range(25)]\\n\",\n    \"\\n\",\n    \"expected = [\\n\",\n    \"    KafkaRecord(topic=topic, partition=partition_index, value=msg, offset=offset)\\n\",\n    \"    for offset, msg in enumerate(msgs)\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"for msg in msgs:\\n\",\n    \"    partition.write(msg)\\n\",\n    \"\\n\",\n    \"for offset in [0, 10, 20]:\\n\",\n    \"    actual = partition.read(offset=offset)\\n\",\n    \"\\n\",\n    \"    assert actual == (expected[offset:], len(msgs))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"be209005\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"partition_index = 0\\n\",\n    \"topic = \\\"test\\\"\\n\",\n    \"key = b\\\"some_key\\\"\\n\",\n    \"partition = KafkaPartition(partition=partition_index, topic=topic)\\n\",\n    \"\\n\",\n    \"msgs = [b\\\"some_msg\\\" for _ in range(25)]\\n\",\n    \"expected = [\\n\",\n    \"    KafkaRecord(\\n\",\n    \"        topic=topic, partition=partition_index, value=msg, key=key, offset=offset\\n\",\n    \"    )\\n\",\n    \"    for offset, msg in enumerate(msgs)\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"for msg in msgs:\\n\",\n    \"    partition.write(msg, key=key)\\n\",\n    \"\\n\",\n    \"for offset in [0, 10, 20]:\\n\",\n    \"    actual = partition.read(offset=offset)\\n\",\n    \"\\n\",\n    \"    assert actual == (expected[offset:], len(msgs)), print(f\\\"{actual} != {expected}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"145cd897\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Kafka topic\"\n   ]\n  
},\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b02c802e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class KafkaTopic:\\n\",\n    \"    def __init__(self, topic: str, num_partitions: int = 1):\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initialize a KafkaTopic object.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            topic: The topic name.\\n\",\n    \"            num_partitions: The number of partitions in the topic (default: 1).\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.topic = topic\\n\",\n    \"        self.num_partitions = num_partitions\\n\",\n    \"        self.partitions: List[KafkaPartition] = [\\n\",\n    \"            KafkaPartition(topic=topic, partition=partition_index)\\n\",\n    \"            for partition_index in range(num_partitions)\\n\",\n    \"        ]\\n\",\n    \"\\n\",\n    \"    def read(  # type: ignore\\n\",\n    \"        self, partition: int, offset: int\\n\",\n    \"    ) -> Tuple[TopicPartition, List[KafkaRecord], int]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Read records from the specified partition and offset.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            partition: The partition index.\\n\",\n    \"            offset: The offset from which to start reading.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            A tuple containing the topic partition, list of Kafka records, and the new offset.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        topic_partition = TopicPartition(topic=self.topic, partition=partition)\\n\",\n    \"        records, offset = self.partitions[partition].read(offset)\\n\",\n    \"        return topic_partition, records, offset\\n\",\n    \"\\n\",\n    \"    def write_with_partition(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        value: bytes,\\n\",\n    \"        partition: 
int,\\n\",\n    \"    ) -> RecordMetadata:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Write a record with a specified partition.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            value: The value of the record.\\n\",\n    \"            partition: The partition to write the record to.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The metadata of the written record.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return self.partitions[partition].write(value)\\n\",\n    \"\\n\",\n    \"    def write_with_key(self, value: bytes, key: bytes) -> RecordMetadata:  # type: ignore\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Write a record with a specified key.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            value: The value of the record.\\n\",\n    \"            key: The key of the record.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The metadata of the written record.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        partition = int(hashlib.sha256(key).hexdigest(), 16) % self.num_partitions\\n\",\n    \"        return self.partitions[partition].write(value, key=key)\\n\",\n    \"\\n\",\n    \"    def write(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        value: bytes,\\n\",\n    \"        *,\\n\",\n    \"        key: Optional[bytes] = None,\\n\",\n    \"        partition: Optional[int] = None,\\n\",\n    \"    ) -> RecordMetadata:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Write a record to the topic.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            value: The value of the record.\\n\",\n    \"            key: The key of the record (optional).\\n\",\n    \"            partition: The partition to write the record to (optional).\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The metadata of the written record.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        if partition is not 
None:\\n\",\n    \"            return self.write_with_partition(value, partition)\\n\",\n    \"\\n\",\n    \"        if key is not None:\\n\",\n    \"            return self.write_with_key(value, key)\\n\",\n    \"\\n\",\n    \"        partition = random.randint(0, self.num_partitions - 1)  # nosec\\n\",\n    \"        return self.write_with_partition(value, partition)\\n\",\n    \"\\n\",\n    \"    def latest_offset(self, partition: int) -> int:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Get the latest offset of a partition.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            partition: The partition index.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The latest offset of the partition.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return self.partitions[partition].latest_offset()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e1fcfe86\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"msg = b\\\"msg\\\"\\n\",\n    \"\\n\",\n    \"topic = KafkaTopic(\\\"test_topic\\\", 1)\\n\",\n    \"\\n\",\n    \"expected = RecordMetadata(\\n\",\n    \"    topic=\\\"test_topic\\\",\\n\",\n    \"    partition=0,\\n\",\n    \"    topic_partition=TopicPartition(topic=\\\"test_topic\\\", partition=0),\\n\",\n    \"    offset=0,\\n\",\n    \"    timestamp=1680602752070,\\n\",\n    \"    timestamp_type=0,\\n\",\n    \"    log_start_offset=0,\\n\",\n    \")\\n\",\n    \"actual = topic.write(msg)\\n\",\n    \"\\n\",\n    \"assert expected == actual\\n\",\n    \"\\n\",\n    \"expected = RecordMetadata(\\n\",\n    \"    topic=\\\"test_topic\\\",\\n\",\n    \"    partition=0,\\n\",\n    \"    topic_partition=TopicPartition(topic=\\\"test_topic\\\", partition=0),\\n\",\n    \"    offset=1,\\n\",\n    \"    timestamp=1680602752070,\\n\",\n    \"    timestamp_type=0,\\n\",\n    \"    log_start_offset=0,\\n\",\n    \")\\n\",\n    \"actual = topic.write(msg, 
key=b\\\"123\\\")\\n\",\n    \"\\n\",\n    \"assert expected == actual, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dbb4ba74\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"topic_name = \\\"test_topic\\\"\\n\",\n    \"msgs = [b\\\"msg\\\" for _ in range(1000)]\\n\",\n    \"partition_num = 10\\n\",\n    \"\\n\",\n    \"topic = KafkaTopic(topic_name, partition_num)\\n\",\n    \"\\n\",\n    \"# write to topic\\n\",\n    \"for msg in msgs:\\n\",\n    \"    topic.write(msg)\\n\",\n    \"\\n\",\n    \"# For each partition in topic check:\\n\",\n    \"for partition in range(partition_num):\\n\",\n    \"    topic_partition_expected = TopicPartition(topic=topic_name, partition=partition)\\n\",\n    \"    topic_partition_actual, data, _ = topic.read(partition=partition, offset=0)\\n\",\n    \"\\n\",\n    \"    # Read returns correct TopicPartition key\\n\",\n    \"    assert topic_partition_actual == topic_partition_expected\\n\",\n    \"\\n\",\n    \"    # Data is written into partition\\n\",\n    \"    assert len(data) > 0\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1f492339\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"topic_name = \\\"test_topic\\\"\\n\",\n    \"msgs = [b\\\"msg\\\" for _ in range(1000)]\\n\",\n    \"partition_num = 2\\n\",\n    \"\\n\",\n    \"topic = KafkaTopic(topic_name, partition_num)\\n\",\n    \"\\n\",\n    \"# write to topic with defined partition\\n\",\n    \"for msg in msgs:\\n\",\n    \"    topic.write(msg, partition=0)\\n\",\n    \"\\n\",\n    \"lengths = [len(topic.read(partition=i, offset=0)[1]) for i in range(partition_num)]\\n\",\n    \"\\n\",\n    \"assert [1000, 0] == lengths\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fff42583\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"topic_name = \\\"test_topic\\\"\\n\",\n   
 \"msgs = [b\\\"msg\\\" for _ in range(1000)]\\n\",\n    \"partition_num = 3\\n\",\n    \"\\n\",\n    \"topic = KafkaTopic(topic_name, partition_num)\\n\",\n    \"\\n\",\n    \"# write to topic with defined key\\n\",\n    \"for msg in msgs[:450]:\\n\",\n    \"    topic.write(msg, key=b\\\"some_key\\\")\\n\",\n    \"\\n\",\n    \"for msg in msgs[450:]:\\n\",\n    \"    topic.write(msg, key=b\\\"some_key443\\\")\\n\",\n    \"\\n\",\n    \"lengths = [len(topic.read(partition=i, offset=0)[1]) for i in range(partition_num)]\\n\",\n    \"\\n\",\n    \"assert [0, 450, 550] == sorted(lengths)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a0280992\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Group metadata\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d1f0e7c0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def split_list(list_to_split: List[Any], split_size: int) -> List[List[Any]]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Split a list into smaller lists of a specified size.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        list_to_split: The list to split.\\n\",\n    \"        split_size: The size of each split.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of smaller lists.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return [\\n\",\n    \"        list_to_split[start_index : start_index + split_size]\\n\",\n    \"        for start_index in range(0, len(list_to_split), split_size)\\n\",\n    \"    ]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1d58bb33\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert split_list([1, 2, 3, 4, 5], 1) == [[1], [2], [3], [4], [5]]\\n\",\n    \"assert split_list([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]\\n\",\n    \"assert split_list([1, 2, 3, 4, 5], 3) == [[1, 2, 3], [4, 
5]]\\n\",\n    \"assert split_list([1, 2, 3, 4, 5], 5) == [[1, 2, 3, 4, 5]]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"90d34107\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class GroupMetadata:\\n\",\n    \"    def __init__(self, num_partitions: int):\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initialize a GroupMetadata object.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            num_partitions: The number of partitions in the group.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.num_partitions = num_partitions\\n\",\n    \"        self.partitions_offsets: Dict[int, int] = {}\\n\",\n    \"        self.consumer_ids: List[uuid.UUID] = list()\\n\",\n    \"        self.partition_assignments: Dict[uuid.UUID, List[int]] = {}\\n\",\n    \"\\n\",\n    \"    def subscribe(self, consumer_id: uuid.UUID) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Subscribe a consumer to the group.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            consumer_id: The ID of the consumer.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.consumer_ids.append(consumer_id)\\n\",\n    \"        self.rebalance()\\n\",\n    \"\\n\",\n    \"    def unsubscribe(self, consumer_id: uuid.UUID) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Unsubscribe a consumer from the group.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            consumer_id: The ID of the consumer.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.consumer_ids.remove(consumer_id)\\n\",\n    \"        self.rebalance()\\n\",\n    \"\\n\",\n    \"    def rebalance(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Rebalance the group's partition assignments.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        if len(self.consumer_ids) == 0:\\n\",\n    \"            
self.partition_assignments = {}\\n\",\n    \"        else:\\n\",\n    \"            partitions_per_actor = self.num_partitions // len(self.consumer_ids)\\n\",\n    \"            if self.num_partitions % len(self.consumer_ids) != 0:\\n\",\n    \"                partitions_per_actor += 1\\n\",\n    \"            self.assign_partitions(partitions_per_actor)\\n\",\n    \"\\n\",\n    \"    def assign_partitions(self, partitions_per_actor: int) -> None:\\n\",\n    \"        partitions = [i for i in range(self.num_partitions)]\\n\",\n    \"\\n\",\n    \"        partitions_split = split_list(partitions, partitions_per_actor)\\n\",\n    \"        self.partition_assignments = {\\n\",\n    \"            self.consumer_ids[i]: partition_split\\n\",\n    \"            for i, partition_split in enumerate(partitions_split)\\n\",\n    \"        }\\n\",\n    \"\\n\",\n    \"    def get_partitions(\\n\",\n    \"        self, consumer_id: uuid.UUID\\n\",\n    \"    ) -> Tuple[List[int], Dict[int, Optional[int]]]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Get the partition assignments and offsets for a consumer.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            consumer_id: The ID of the consumer.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            A tuple containing the partition assignments and offsets.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        partition_assignments = self.partition_assignments.get(consumer_id, [])\\n\",\n    \"        partition_offsets_assignments = {\\n\",\n    \"            partition: self.partitions_offsets.get(partition, None)\\n\",\n    \"            for partition in partition_assignments\\n\",\n    \"        }\\n\",\n    \"        return partition_assignments, partition_offsets_assignments\\n\",\n    \"\\n\",\n    \"    def set_offset(self, partition: int, offset: int) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Set the offset for a partition.\\n\",\n    \"\\n\",\n    \"        
Args:\\n\",\n    \"            partition: The partition index.\\n\",\n    \"            offset: The offset to set.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.partitions_offsets[partition] = offset\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9a230027\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"group_meta = GroupMetadata(num_partitions=3)\\n\",\n    \"\\n\",\n    \"# subscribe first consumer\\n\",\n    \"consumer_id_1 = uuid.uuid4()\\n\",\n    \"group_meta.subscribe(consumer_id_1)\\n\",\n    \"# check partitions\\n\",\n    \"assert group_meta.get_partitions(consumer_id_1)[0] == [0, 1, 2]\\n\",\n    \"\\n\",\n    \"# subscribe second consumer\\n\",\n    \"consumer_id_2 = uuid.uuid4()\\n\",\n    \"group_meta.subscribe(consumer_id_2)\\n\",\n    \"# check partitions\\n\",\n    \"assert group_meta.get_partitions(consumer_id_1)[0] == [0, 1]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_2)[0] == [2]\\n\",\n    \"\\n\",\n    \"# subscribe third consumer\\n\",\n    \"consumer_id_3 = uuid.uuid4()\\n\",\n    \"group_meta.subscribe(consumer_id_3)\\n\",\n    \"# check partitions\\n\",\n    \"assert group_meta.get_partitions(consumer_id_1)[0] == [0]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_2)[0] == [1]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_3)[0] == [2]\\n\",\n    \"\\n\",\n    \"# subscribe fourth consumer\\n\",\n    \"# subscribe third consumer\\n\",\n    \"consumer_id_4 = uuid.uuid4()\\n\",\n    \"group_meta.subscribe(consumer_id_4)\\n\",\n    \"# check partitions\\n\",\n    \"assert group_meta.get_partitions(consumer_id_1)[0] == [0]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_2)[0] == [1]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_3)[0] == [2]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_4)[0] == []  # fourth consumer is starving\\n\",\n    \"\\n\",\n    \"# Unsubscribe one consumer\\n\",\n    
\"group_meta.unsubscribe(consumer_id_3)\\n\",\n    \"# check partitions\\n\",\n    \"assert group_meta.get_partitions(consumer_id_1)[0] == [0]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_2)[0] == [1]\\n\",\n    \"assert group_meta.get_partitions(consumer_id_4)[0] == [2], group_meta.get_partitions(\\n\",\n    \"    consumer_id_4\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"# Unsubscribe all but one consumer\\n\",\n    \"group_meta.unsubscribe(consumer_id_1)\\n\",\n    \"group_meta.unsubscribe(consumer_id_4)\\n\",\n    \"assert group_meta.get_partitions(consumer_id_2)[0] == [0, 1, 2]\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0e25de69\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Kafka broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"46e96510\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@classcontextmanager()\\n\",\n    \"class InMemoryBroker:\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        num_partitions: int = 1,\\n\",\n    \"    ):\\n\",\n    \"        self.num_partitions = num_partitions\\n\",\n    \"        self.topics: Dict[Tuple[str, str], KafkaTopic] = {}\\n\",\n    \"        self.topic_groups: Dict[Tuple[str, str, str], GroupMetadata] = {}\\n\",\n    \"        self.is_started: bool = False\\n\",\n    \"\\n\",\n    \"    def connect(self) -> uuid.UUID:\\n\",\n    \"        return uuid.uuid4()\\n\",\n    \"\\n\",\n    \"    def dissconnect(self, consumer_id: uuid.UUID) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Disconnect a consumer from the broker.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            consumer_id: The ID of the consumer.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    def subscribe(\\n\",\n    \"        self, bootstrap_server: str, topic: str, group: str, consumer_id: 
uuid.UUID\\n\",\n    \"    ) -> None:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    def unsubscribe(\\n\",\n    \"        self, bootstrap_server: str, topic: str, group: str, consumer_id: uuid.UUID\\n\",\n    \"    ) -> None:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    def read(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        *,\\n\",\n    \"        bootstrap_server: str,\\n\",\n    \"        topic: str,\\n\",\n    \"        group: str,\\n\",\n    \"        consumer_id: uuid.UUID,\\n\",\n    \"        auto_offset_reset: str,\\n\",\n    \"    ) -> Dict[TopicPartition, List[KafkaRecord]]:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    def write(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        *,\\n\",\n    \"        bootstrap_server: str,\\n\",\n    \"        topic: str,\\n\",\n    \"        value: bytes,\\n\",\n    \"        key: Optional[bytes] = None,\\n\",\n    \"        partition: Optional[int] = None,\\n\",\n    \"    ) -> RecordMetadata:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @contextmanager\\n\",\n    \"    def lifecycle(self) -> Iterator[\\\"InMemoryBroker\\\"]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Context manager for the lifecycle of the in-memory broker.\\n\",\n    \"\\n\",\n    \"        Yields:\\n\",\n    \"            An instance of the in-memory broker.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    async def _start(self) -> str:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Start the in-memory broker.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The address of the broker.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        logger.info(\\\"InMemoryBroker._start() called\\\")\\n\",\n    \"        self.__enter__()  # type: ignore\\n\",\n    \"        return 
\\\"localbroker:0\\\"\\n\",\n    \"\\n\",\n    \"    async def _stop(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Stop the in-memory broker.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        logger.info(\\\"InMemoryBroker._stop() called\\\")\\n\",\n    \"        self.__exit__(None, None, None)  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b4a4bcae\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def subscribe(\\n\",\n    \"    self: InMemoryBroker,\\n\",\n    \"    bootstrap_server: str,\\n\",\n    \"    topic: str,\\n\",\n    \"    group: str,\\n\",\n    \"    consumer_id: uuid.UUID,\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Subscribe a consumer to a topic group.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        bootstrap_server: The bootstrap server address.\\n\",\n    \"        topic: The topic to subscribe to.\\n\",\n    \"        group: The group to join.\\n\",\n    \"        consumer_id: The ID of the consumer.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if (bootstrap_server, topic) not in self.topics:\\n\",\n    \"        self.topics[(bootstrap_server, topic)] = KafkaTopic(\\n\",\n    \"            topic=topic, num_partitions=self.num_partitions\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    group_meta = self.topic_groups.get(\\n\",\n    \"        (bootstrap_server, topic, group), GroupMetadata(self.num_partitions)\\n\",\n    \"    )\\n\",\n    \"    group_meta.subscribe(consumer_id)\\n\",\n    \"    self.topic_groups[(bootstrap_server, topic, group)] = group_meta\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def unsubscribe(\\n\",\n    \"    self: InMemoryBroker,\\n\",\n    \"    bootstrap_server: str,\\n\",\n    \"    topic: str,\\n\",\n    \"    group: str,\\n\",\n    \"    consumer_id: uuid.UUID,\\n\",\n    
\") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Unsubscribe a consumer from a topic group.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        bootstrap_server: The bootstrap server address.\\n\",\n    \"        topic: The topic to unsubscribe from.\\n\",\n    \"        group: The group to leave.\\n\",\n    \"        consumer_id: The ID of the consumer.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    self.topic_groups[(bootstrap_server, topic, group)].unsubscribe(consumer_id)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4d4ac05b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"topic = \\\"topic1\\\"\\n\",\n    \"bootstrap_server = \\\"localhost:9092\\\"\\n\",\n    \"consumer_group = \\\"my_group\\\"\\n\",\n    \"\\n\",\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"with pytest.raises(KeyError):\\n\",\n    \"    broker.topic_groups[(bootstrap_server, topic, consumer_group)]\\n\",\n    \"\\n\",\n    \"consumer_id = broker.connect()\\n\",\n    \"\\n\",\n    \"broker.subscribe(bootstrap_server, topic, consumer_group, consumer_id)\\n\",\n    \"broker.topic_groups[(bootstrap_server, topic, consumer_group)]\\n\",\n    \"\\n\",\n    \"broker.unsubscribe(bootstrap_server, topic, consumer_group, consumer_id)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"72375d67\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def write(  # type: ignore\\n\",\n    \"    self: InMemoryBroker,\\n\",\n    \"    *,\\n\",\n    \"    bootstrap_server: str,\\n\",\n    \"    topic: str,\\n\",\n    \"    value: bytes,\\n\",\n    \"    key: Optional[bytes] = None,\\n\",\n    \"    partition: Optional[int] = None,\\n\",\n    \") -> RecordMetadata:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Write a message to a topic.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n  
  \"        bootstrap_server: The bootstrap server address.\\n\",\n    \"        topic: The topic to write the message to.\\n\",\n    \"        value: The value of the message.\\n\",\n    \"        key: The key associated with the message.\\n\",\n    \"        partition: The partition ID to write the message to.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The metadata of the written message.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if (bootstrap_server, topic) not in self.topics:\\n\",\n    \"        self.topics[(bootstrap_server, topic)] = KafkaTopic(\\n\",\n    \"            topic=topic, num_partitions=self.num_partitions\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    return self.topics[(bootstrap_server, topic)].write(\\n\",\n    \"        value, key=key, partition=partition\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d68339dd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"for key in [None, b\\\"some_key\\\"]:\\n\",\n    \"    topic = \\\"my_topic\\\"\\n\",\n    \"    bootstrap_server = \\\"localhost:9092\\\"\\n\",\n    \"    value = b\\\"msg\\\"\\n\",\n    \"\\n\",\n    \"    broker = InMemoryBroker(num_partitions=3)\\n\",\n    \"\\n\",\n    \"    record_meta = broker.write(\\n\",\n    \"        bootstrap_server=bootstrap_server, topic=topic, value=value, key=key\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    assert record_meta.topic == \\\"my_topic\\\"\\n\",\n    \"    assert record_meta.offset == 0\\n\",\n    \"\\n\",\n    \"    expected_msgs = [\\n\",\n    \"        KafkaRecord(\\n\",\n    \"            topic=\\\"my_topic\\\",\\n\",\n    \"            partition=record_meta.partition,\\n\",\n    \"            key=key,\\n\",\n    \"            value=b\\\"msg\\\",\\n\",\n    \"            offset=0,\\n\",\n    \"        )\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    topic_partition, actual_msgs, new_offset = broker.topics[\\n\",\n    
\"        (bootstrap_server, topic)\\n\",\n    \"    ].read(partition=record_meta.partition, offset=record_meta.offset)\\n\",\n    \"\\n\",\n    \"    assert actual_msgs == expected_msgs\\n\",\n    \"    assert topic_partition == record_meta.topic_partition\\n\",\n    \"    assert new_offset == 1\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f44154f8\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def read(  # type: ignore\\n\",\n    \"    self: InMemoryBroker,\\n\",\n    \"    *,\\n\",\n    \"    bootstrap_server: str,\\n\",\n    \"    topic: str,\\n\",\n    \"    group: str,\\n\",\n    \"    consumer_id: uuid.UUID,\\n\",\n    \"    auto_offset_reset: str,\\n\",\n    \") -> Dict[TopicPartition, List[KafkaRecord]]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Read messages from a topic group.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        bootstrap_server: The bootstrap server address.\\n\",\n    \"        topic: The topic to read messages from.\\n\",\n    \"        group: The group to read messages for.\\n\",\n    \"        consumer_id: The ID of the consumer.\\n\",\n    \"        auto_offset_reset: The strategy to use when the consumer does not have a valid offset for the group.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A dictionary containing the messages retrieved from each topic partition.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    group_meta = self.topic_groups[(bootstrap_server, topic, group)]\\n\",\n    \"    partitions, offsets = group_meta.get_partitions(consumer_id)\\n\",\n    \"\\n\",\n    \"    if len(partitions) == 0:\\n\",\n    \"        return {}\\n\",\n    \"\\n\",\n    \"    partitions_data = {}\\n\",\n    \"\\n\",\n    \"    for partition in partitions:\\n\",\n    \"        offset = offsets[partition]\\n\",\n    \"\\n\",\n    \"        if offset is None:\\n\",\n    \"     
       offset = (\\n\",\n    \"                self.topics[(bootstrap_server, topic)].latest_offset(partition)\\n\",\n    \"                if auto_offset_reset == \\\"latest\\\"\\n\",\n    \"                else 0\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"        topic_partition, data, offset = self.topics[(bootstrap_server, topic)].read(\\n\",\n    \"            partition, offset\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        partitions_data[topic_partition] = data\\n\",\n    \"        group_meta.set_offset(partition, offset)\\n\",\n    \"\\n\",\n    \"    return partitions_data\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9f2edf62\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check subscribing and reading from empty partitions for same group\\n\",\n    \"\\n\",\n    \"topic = \\\"topic1\\\"\\n\",\n    \"bootstrap_server = \\\"localhost:9092\\\"\\n\",\n    \"consumer_group = \\\"my_group\\\"\\n\",\n    \"\\n\",\n    \"broker = InMemoryBroker(num_partitions=3)\\n\",\n    \"\\n\",\n    \"consumer_id_1 = broker.connect()\\n\",\n    \"broker.subscribe(bootstrap_server, topic, consumer_group, consumer_id_1)\\n\",\n    \"\\n\",\n    \"assert broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    group=consumer_group,\\n\",\n    \"    consumer_id=consumer_id_1,\\n\",\n    \"    auto_offset_reset=\\\"latest\\\",\\n\",\n    \") == {\\n\",\n    \"    TopicPartition(topic=topic, partition=0): [],\\n\",\n    \"    TopicPartition(topic=topic, partition=1): [],\\n\",\n    \"    TopicPartition(topic=topic, partition=2): [],\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"consumer_id_2 = broker.connect()\\n\",\n    \"broker.subscribe(bootstrap_server, topic, consumer_group, consumer_id_2)\\n\",\n    \"\\n\",\n    \"assert broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    
group=consumer_group,\\n\",\n    \"    consumer_id=consumer_id_1,\\n\",\n    \"    auto_offset_reset=\\\"latest\\\",\\n\",\n    \") == {\\n\",\n    \"    TopicPartition(topic=topic, partition=0): [],\\n\",\n    \"    TopicPartition(topic=topic, partition=1): [],\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"assert broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    group=consumer_group,\\n\",\n    \"    consumer_id=consumer_id_2,\\n\",\n    \"    auto_offset_reset=\\\"latest\\\",\\n\",\n    \") == {\\n\",\n    \"    TopicPartition(topic=topic, partition=2): [],\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"broker.unsubscribe(bootstrap_server, topic, consumer_group, consumer_id_1)\\n\",\n    \"assert broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    group=consumer_group,\\n\",\n    \"    consumer_id=consumer_id_2,\\n\",\n    \"    auto_offset_reset=\\\"latest\\\",\\n\",\n    \") == {\\n\",\n    \"    TopicPartition(topic=topic, partition=0): [],\\n\",\n    \"    TopicPartition(topic=topic, partition=1): [],\\n\",\n    \"    TopicPartition(topic=topic, partition=2): [],\\n\",\n    \"}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a30de58e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# check writing to partitions\\n\",\n    \"\\n\",\n    \"topic = \\\"topic1\\\"\\n\",\n    \"bootstrap_server = \\\"localhost:9092\\\"\\n\",\n    \"consumer_group = \\\"my_group\\\"\\n\",\n    \"\\n\",\n    \"broker = InMemoryBroker(num_partitions=1)\\n\",\n    \"\\n\",\n    \"consumer_id_1 = broker.connect()\\n\",\n    \"broker.subscribe(bootstrap_server, topic, consumer_group, consumer_id_1)\\n\",\n    \"\\n\",\n    \"record_meta = broker.write(bootstrap_server=bootstrap_server, topic=topic, value=b\\\"msg\\\")\\n\",\n    \"\\n\",\n    \"assert record_meta.topic == topic\\n\",\n    \"assert 
record_meta.partition == 0\\n\",\n    \"assert record_meta.topic_partition == TopicPartition(topic=topic, partition=0)\\n\",\n    \"assert record_meta.offset == 0\\n\",\n    \"\\n\",\n    \"assert broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    consumer_id=consumer_id_1,\\n\",\n    \"    group=consumer_group,\\n\",\n    \"    auto_offset_reset=\\\"earliest\\\",\\n\",\n    \") == {\\n\",\n    \"    TopicPartition(topic=topic, partition=0): [\\n\",\n    \"        KafkaRecord(topic=topic, partition=0, key=None, value=b\\\"msg\\\", offset=0)\\n\",\n    \"    ]\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"broker.write(bootstrap_server=bootstrap_server, topic=topic, value=b\\\"msg\\\")\\n\",\n    \"\\n\",\n    \"consumer_group_new = \\\"another_group\\\"\\n\",\n    \"\\n\",\n    \"consumer_id_2 = broker.connect()\\n\",\n    \"broker.subscribe(bootstrap_server, topic, consumer_group_new, consumer_id_2)\\n\",\n    \"\\n\",\n    \"assert broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    consumer_id=consumer_id_2,\\n\",\n    \"    group=consumer_group_new,\\n\",\n    \"    auto_offset_reset=\\\"latest\\\",\\n\",\n    \") == {TopicPartition(topic=topic, partition=0): []}\\n\",\n    \"\\n\",\n    \"assert broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    consumer_id=consumer_id_1,\\n\",\n    \"    group=consumer_group,\\n\",\n    \"    auto_offset_reset=\\\"latest\\\",\\n\",\n    \") == {\\n\",\n    \"    TopicPartition(topic=topic, partition=0): [\\n\",\n    \"        KafkaRecord(topic=topic, partition=0, key=None, value=b\\\"msg\\\", offset=1)\\n\",\n    \"    ]\\n\",\n    \"}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c59295ad\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"topic = \\\"my_topic\\\"\\n\",\n    
\"bootstrap_server = \\\"localhost:9092\\\"\\n\",\n    \"group = \\\"my_group\\\"\\n\",\n    \"\\n\",\n    \"in_memory_broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"consumer_id = in_memory_broker.connect()\\n\",\n    \"\\n\",\n    \"with pytest.raises(KeyError) as e:\\n\",\n    \"    in_memory_broker.read(\\n\",\n    \"        bootstrap_server=bootstrap_server,\\n\",\n    \"        topic=topic,\\n\",\n    \"        group=group,\\n\",\n    \"        consumer_id=consumer_id,\\n\",\n    \"        auto_offset_reset=\\\"latest\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"in_memory_broker.subscribe(\\n\",\n    \"    bootstrap_server=bootstrap_server, topic=topic, group=group, consumer_id=consumer_id\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"msg = in_memory_broker.read(\\n\",\n    \"    bootstrap_server=bootstrap_server,\\n\",\n    \"    topic=topic,\\n\",\n    \"    group=group,\\n\",\n    \"    consumer_id=consumer_id,\\n\",\n    \"    auto_offset_reset=\\\"earliest\\\",\\n\",\n    \")\\n\",\n    \"assert msg == {TopicPartition(topic=topic, partition=0): []}, msg\"\n   ]\n  },\n  {\n   \"cell_type\": \"raw\",\n   \"id\": \"cee0ab7e\",\n   \"metadata\": {},\n   \"source\": [\n    \"#| notest\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker([\\\"my_topic\\\"], apply_nest_asyncio=True) as bootstrap_servers:\\n\",\n    \"    producer = AIOKafkaProducer(bootstrap_servers=bootstrap_servers)\\n\",\n    \"    await producer.start()\\n\",\n    \"    for _ in range(1000):\\n\",\n    \"        record = await producer.send(topic=\\\"not_my_topic\\\", value=b\\\"not my message\\\")\\n\",\n    \"    await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"raw\",\n   \"id\": \"fe028489\",\n   \"metadata\": {},\n   \"source\": [\n    \"#| notest\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker([\\\"my_topic\\\"], apply_nest_asyncio=True) as bootstrap_servers:\\n\",\n    \"    consumer = AIOKafkaConsumer(\\\"my_topic\\\", bootstrap_servers=bootstrap_servers)\\n\",\n  
  \"    await consumer.start()\\n\",\n    \"    print(\\\"getmany()...\\\")\\n\",\n    \"    msg = await consumer.getmany(timeout_ms=0)\\n\",\n    \"    print(\\\"exiting...\\\")\\n\",\n    \"    await consumer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"88c5b3a9\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Consumer patching\\n\",\n    \"\\n\",\n    \"We need to patch AIOKafkaConsumer methods so that we can redirect the consumer to our local kafka broker.\\n\",\n    \"\\n\",\n    \"Patched methods:\\n\",\n    \"\\n\",\n    \"- [x] \\\\_\\\\_init\\\\_\\\\_\\n\",\n    \"- [x] start\\n\",\n    \"- [x] subscribe\\n\",\n    \"- [x] stop\\n\",\n    \"- [x] getmany\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c85ca42b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# InMemoryConsumer\\n\",\n    \"class InMemoryConsumer:\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        broker: InMemoryBroker,\\n\",\n    \"    ) -> None:\\n\",\n    \"        self.broker = broker\\n\",\n    \"        self._id: Optional[uuid.UUID] = None\\n\",\n    \"        self._auto_offset_reset: str = \\\"latest\\\"\\n\",\n    \"        self._group_id: Optional[str] = None\\n\",\n    \"        self._topics: List[str] = list()\\n\",\n    \"        self._bootstrap_servers = \\\"\\\"\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaConsumer)\\n\",\n    \"    def __call__(self, **kwargs: Any) -> \\\"InMemoryConsumer\\\":\\n\",\n    \"        defaults = _get_default_kwargs_from_sig(InMemoryConsumer.__call__, **kwargs)\\n\",\n    \"        consume_copy = InMemoryConsumer(self.broker)\\n\",\n    \"        consume_copy._auto_offset_reset = defaults[\\\"auto_offset_reset\\\"]\\n\",\n    \"        consume_copy._bootstrap_servers = (\\n\",\n    \"            \\\"\\\".join(defaults[\\\"bootstrap_servers\\\"])\\n\",\n    \"        
    if isinstance(defaults[\\\"bootstrap_servers\\\"], list)\\n\",\n    \"            else defaults[\\\"bootstrap_servers\\\"]\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        consume_copy._group_id = (\\n\",\n    \"            defaults[\\\"group_id\\\"]\\n\",\n    \"            if defaults[\\\"group_id\\\"] is not None\\n\",\n    \"            else \\\"\\\".join(random.choices(string.ascii_letters, k=10))  # nosec\\n\",\n    \"        )\\n\",\n    \"        return consume_copy\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaConsumer.start)\\n\",\n    \"    async def start(self, **kwargs: Any) -> None:\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaConsumer.stop)\\n\",\n    \"    async def stop(self, **kwargs: Any) -> None:\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaConsumer.subscribe)\\n\",\n    \"    def subscribe(self, topics: List[str], **kwargs: Any) -> None:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaConsumer.getmany)\\n\",\n    \"    async def getmany(  # type: ignore\\n\",\n    \"        self, **kwargs: Any\\n\",\n    \"    ) -> Dict[TopicPartition, List[ConsumerRecord]]:\\n\",\n    \"        raise NotImplementedError()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"677208e3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:04:22.629 [ERROR] asyncio: Unclosed AIOKafkaConsumer\\n\",\n      \"consumer: <aiokafka.consumer.consumer.AIOKafkaConsumer object>\\n\",\n      \"23-06-26 08:04:22.631 [ERROR] asyncio: Unclosed AIOKafkaConsumer\\n\",\n      \"consumer: <aiokafka.consumer.consumer.AIOKafkaConsumer object>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ConsumerClass = InMemoryConsumer(broker)\\n\",\n    \"\\n\",\n    \"for cls 
in [ConsumerClass, AIOKafkaConsumer]:\\n\",\n    \"    consumer = cls()\\n\",\n    \"    assert consumer._auto_offset_reset == \\\"latest\\\"\\n\",\n    \"\\n\",\n    \"    consumer = cls(auto_offset_reset=\\\"earliest\\\")\\n\",\n    \"    assert consumer._auto_offset_reset == \\\"earliest\\\", consumer._auto_offset_reset\\n\",\n    \"\\n\",\n    \"    consumer = cls(auto_offset_reset=\\\"whatever\\\")\\n\",\n    \"    assert consumer._auto_offset_reset == \\\"whatever\\\"\\n\",\n    \"\\n\",\n    \"    await consumer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"28e4df06\",\n   \"metadata\": {},\n   \"source\": [\n    \"Patching start so that we don't try to start the real AIOKafkaConsumer instance\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6f53f9da\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaConsumer.start)\\n\",\n    \"async def start(self: InMemoryConsumer, **kwargs: Any) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Start consuming messages from the connected broker.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If start() has already been called without calling stop() first.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(\\\"AIOKafkaConsumer patched start() called()\\\")\\n\",\n    \"    if self._id is not None:\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            \\\"Consumer start() already called! 
Run consumer stop() before running start() again\\\"\\n\",\n    \"        )\\n\",\n    \"    self._id = self.broker.connect()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"072dd1f0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:04:33.493 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ConsumerClass = InMemoryConsumer(broker)\\n\",\n    \"\\n\",\n    \"for cls in [ConsumerClass]:\\n\",\n    \"    consumer = cls()\\n\",\n    \"    await consumer.start()\\n\",\n    \"    await consumer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"21c11cd3\",\n   \"metadata\": {},\n   \"source\": [\n    \"Patching subscribe so that we can connect to our Local, in-memory, Kafka broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1de72311\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch  # type: ignore\\n\",\n    \"@delegates(AIOKafkaConsumer.subscribe)\\n\",\n    \"def subscribe(self: InMemoryConsumer, topics: List[str], **kwargs: Any) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Subscribe to a list of topics for consuming messages.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topics: A list of topics to subscribe to.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If start() has not been called before calling subscribe().\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(\\\"AIOKafkaConsumer patched subscribe() called\\\")\\n\",\n    \"    if self._id is None:\\n\",\n    \"        raise RuntimeError(\\\"Consumer start() not called! 
Run consumer start() first\\\")\\n\",\n    \"    logger.info(f\\\"AIOKafkaConsumer.subscribe(), subscribing to: {topics}\\\")\\n\",\n    \"    for topic in topics:\\n\",\n    \"        self.broker.subscribe(\\n\",\n    \"            bootstrap_server=self._bootstrap_servers,\\n\",\n    \"            consumer_id=self._id,\\n\",\n    \"            topic=topic,\\n\",\n    \"            group=self._group_id,  # type: ignore\\n\",\n    \"        )\\n\",\n    \"        self._topics.append(topic)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"945e08d4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:04:37.008 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:04:37.010 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:04:37.010 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['my_topic']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ConsumerClass = InMemoryConsumer(broker)\\n\",\n    \"consumer = ConsumerClass()\\n\",\n    \"await consumer.start()\\n\",\n    \"consumer.subscribe([\\\"my_topic\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"dd80a4d5\",\n   \"metadata\": {},\n   \"source\": [\n    \"Patching stop so that we don't break anything by calling the real AIOKafkaConsumer stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dc82405e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaConsumer.stop)\\n\",\n    \"async def stop(self: InMemoryConsumer, **kwargs: Any) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Stop consuming messages from the connected broker.\\n\",\n    \"\\n\",\n    \"   
 Raises:\\n\",\n    \"        RuntimeError: If start() has not been called before calling stop().\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(\\\"AIOKafkaConsumer patched stop() called\\\")\\n\",\n    \"    if self._id is None:\\n\",\n    \"        raise RuntimeError(\\\"Consumer start() not called! Run consumer start() first\\\")\\n\",\n    \"    for topic in self._topics:\\n\",\n    \"        self.broker.unsubscribe(\\n\",\n    \"            bootstrap_server=self._bootstrap_servers,\\n\",\n    \"            topic=topic,\\n\",\n    \"            group=self._group_id,  # type: ignore\\n\",\n    \"            consumer_id=self._id,\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fc667214\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:04:38.785 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:04:38.786 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:04:38.787 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['my_topic']\\n\",\n      \"23-06-26 08:04:38.787 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ConsumerClass = InMemoryConsumer(broker)\\n\",\n    \"consumer = ConsumerClass()\\n\",\n    \"\\n\",\n    \"await consumer.start()\\n\",\n    \"consumer.subscribe([\\\"my_topic\\\"])\\n\",\n    \"await consumer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a0c733b4\",\n   \"metadata\": {},\n   \"source\": [\n    \"Patching getmany so that the messages are pulled from our Local, in-memory, Kafka broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0663c9b4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   
\"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaConsumer.getmany)\\n\",\n    \"async def getmany(  # type: ignore\\n\",\n    \"    self: InMemoryConsumer, **kwargs: Any\\n\",\n    \") -> Dict[TopicPartition, List[ConsumerRecord]]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Retrieve messages from the subscribed topics.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A dictionary containing the retrieved messages from each topic partition.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If start() has not been called before calling getmany().\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    await asyncio.sleep(0)\\n\",\n    \"    for topic in self._topics:\\n\",\n    \"        return self.broker.read(\\n\",\n    \"            bootstrap_server=self._bootstrap_servers,\\n\",\n    \"            topic=topic,\\n\",\n    \"            consumer_id=self._id,  # type: ignore\\n\",\n    \"            group=self._group_id,  # type: ignore\\n\",\n    \"            auto_offset_reset=self._auto_offset_reset,\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a4940bac\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:04:40.641 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:04:40.644 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:04:40.646 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['my_topic']\\n\",\n      \"23-06-26 08:04:40.649 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ConsumerClass = InMemoryConsumer(broker)\\n\",\n    \"consumer = ConsumerClass(auto_offset_reset=\\\"latest\\\")\\n\",\n    
\"\\n\",\n    \"await consumer.start()\\n\",\n    \"\\n\",\n    \"consumer.subscribe([\\\"my_topic\\\"])\\n\",\n    \"await consumer.getmany()\\n\",\n    \"\\n\",\n    \"await consumer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"723468f0\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Producer patching\\n\",\n    \"\\n\",\n    \"We need to patch AIOKafkaProducer methods so that we can redirect the producer to our local kafka broker\\n\",\n    \"\\n\",\n    \"- [x] \\\\_\\\\_init\\\\_\\\\_\\n\",\n    \"- [x] start\\n\",\n    \"- [x] stop\\n\",\n    \"- [x] send\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ad3d6b0f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class InMemoryProducer:\\n\",\n    \"    def __init__(self, broker: InMemoryBroker, **kwargs: Any) -> None:\\n\",\n    \"        self.broker = broker\\n\",\n    \"        self.id: Optional[uuid.UUID] = None\\n\",\n    \"        self._bootstrap_servers = \\\"\\\"\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer)\\n\",\n    \"    def __call__(self, **kwargs: Any) -> \\\"InMemoryProducer\\\":\\n\",\n    \"        defaults = _get_default_kwargs_from_sig(InMemoryProducer.__call__, **kwargs)\\n\",\n    \"        producer_copy = InMemoryProducer(self.broker)\\n\",\n    \"        producer_copy._bootstrap_servers = (\\n\",\n    \"            \\\"\\\".join(defaults[\\\"bootstrap_servers\\\"])\\n\",\n    \"            if isinstance(defaults[\\\"bootstrap_servers\\\"], list)\\n\",\n    \"            else defaults[\\\"bootstrap_servers\\\"]\\n\",\n    \"        )\\n\",\n    \"        return producer_copy\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer.start)\\n\",\n    \"    async def start(self, **kwargs: Any) -> None:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer.stop)\\n\",\n    \"   
 async def stop(self, **kwargs: Any) -> None:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer.send)\\n\",\n    \"    async def send(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        topic: str,\\n\",\n    \"        msg: bytes,\\n\",\n    \"        key: Optional[bytes] = None,\\n\",\n    \"        **kwargs: Any,\\n\",\n    \"    ):\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer.partitions_for)\\n\",\n    \"    async def partitions_for(self, topic: str) -> List[int]:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer._partition)\\n\",\n    \"    def _partition(\\n\",\n    \"        self, topic: str, arg1: Any, arg2: Any, arg3: Any, key: bytes, arg4: Any\\n\",\n    \"    ) -> int:\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer.create_batch)\\n\",\n    \"    def create_batch(self) -> \\\"MockBatch\\\":\\n\",\n    \"        raise NotImplementedError()\\n\",\n    \"\\n\",\n    \"    @delegates(AIOKafkaProducer.send_batch)\\n\",\n    \"    async def send_batch(self, batch: \\\"MockBatch\\\", topic: str, partition: Any) -> None:\\n\",\n    \"        raise NotImplementedError()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e509e2a7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"producer_cls = InMemoryProducer(None)\\n\",\n    \"\\n\",\n    \"producer = producer_cls()\\n\",\n    \"assert producer._bootstrap_servers == \\\"localhost\\\"\\n\",\n    \"\\n\",\n    \"producer = producer_cls(bootstrap_servers=\\\"kafka.airt.ai\\\")\\n\",\n    \"assert producer._bootstrap_servers == \\\"kafka.airt.ai\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"bedf322a\",\n   \"metadata\": {},\n   \"source\": [\n    \"Patching AIOKafkaProducer start so 
 that we mock the startup procedure of AIOKafkaProducer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"488ac5c3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch  # type: ignore\\n\",\n    \"@delegates(AIOKafkaProducer.start)\\n\",\n    \"async def start(self: InMemoryProducer, **kwargs: Any) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Start the in-memory producer.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If start() has already been called without calling stop() first.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(\\\"AIOKafkaProducer patched start() called()\\\")\\n\",\n    \"    if self.id is not None:\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            \\\"Producer start() already called! Run producer stop() before running start() again\\\"\\n\",\n    \"        )\\n\",\n    \"    self.id = self.broker.connect()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f250c614\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:04:43.984 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ProducerClass = InMemoryProducer(broker)\\n\",\n    \"producer = ProducerClass()\\n\",\n    \"\\n\",\n    \"await producer.start()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1f9e9be9\",\n   \"metadata\": {},\n   \"source\": [\n    \"Patching AIOKafkaProducer stop() so that we don't unintentionally try to stop a real instance of AIOKafkaProducer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"32412969\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | 
export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch  # type: ignore\\n\",\n    \"@delegates(AIOKafkaProducer.stop)\\n\",\n    \"async def stop(self: InMemoryProducer, **kwargs: Any) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Stop the in-memory producer.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If start() has not been called before calling stop().\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(\\\"AIOKafkaProducer patched stop() called\\\")\\n\",\n    \"    if self.id is None:\\n\",\n    \"        raise RuntimeError(\\\"Producer start() not called! Run producer start() first\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9f4a1fbb\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:04:47.703 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:04:47.704 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ProducerClass = InMemoryProducer(broker)\\n\",\n    \"producer = ProducerClass()\\n\",\n    \"\\n\",\n    \"await producer.start()\\n\",\n    \"await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b2c77a56\",\n   \"metadata\": {},\n   \"source\": [\n    \"Patching AIOKafkaProducer send so that we redirect sent messages to Local, in-memory, Kafka broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3f42a03e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaProducer.send)\\n\",\n    \"async def send(  # type: ignore\\n\",\n    \"    self: InMemoryProducer,\\n\",\n    \"    topic: str,\\n\",\n    \"    msg: bytes,\\n\",\n    \"    
key: Optional[bytes] = None,\\n\",\n    \"    partition: Optional[int] = None,\\n\",\n    \"    **kwargs: Any,\\n\",\n    \"):  # asyncio.Task[RecordMetadata]\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Send a message to the specified topic.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The topic to send the message to.\\n\",\n    \"        msg: The message to send.\\n\",\n    \"        key: The key associated with the message (optional).\\n\",\n    \"        partition: The partition to send the message to (optional).\\n\",\n    \"        **kwargs: Additional arguments to be passed to AIOKafkaProducer.send().\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A task that resolves to the RecordMetadata of the sent message.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If start() has not been called before calling send().\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if self.id is None:\\n\",\n    \"        raise RuntimeError(\\\"Producer start() not called! 
Run producer start() first\\\")\\n\",\n    \"\\n\",\n    \"    record = self.broker.write(\\n\",\n    \"        bootstrap_server=self._bootstrap_servers,\\n\",\n    \"        topic=topic,\\n\",\n    \"        value=msg,\\n\",\n    \"        key=key,\\n\",\n    \"        partition=partition,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    async def _f(record: ConsumerRecord = record) -> RecordMetadata:  # type: ignore\\n\",\n    \"        return record\\n\",\n    \"\\n\",\n    \"    return asyncio.create_task(_f())\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9fda1d9b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:05:09.942 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"RecordMetadata(topic='my_topic', partition=0, topic_partition=TopicPartition(topic='my_topic', partition=0), offset=0, timestamp=1680602752070, timestamp_type=0, log_start_offset=0)\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ProducerClass = InMemoryProducer(broker)\\n\",\n    \"producer = ProducerClass()\\n\",\n    \"\\n\",\n    \"await producer.start()\\n\",\n    \"msg_fut = await producer.send(\\\"my_topic\\\", b\\\"some_msg\\\")\\n\",\n    \"await msg_fut\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"eecd05b8\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaProducer.partitions_for)\\n\",\n    \"async def partitions_for(self: InMemoryProducer, topic: str) -> List[int]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Retrieve the list 
of partitions for the specified topic.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The topic to get the partitions for.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of partition IDs.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return [i for i in range(self.broker.num_partitions)]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ea87579a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"broker = InMemoryBroker(num_partitions=5)\\n\",\n    \"\\n\",\n    \"ProducerClass = InMemoryProducer(broker)\\n\",\n    \"producer = ProducerClass()\\n\",\n    \"\\n\",\n    \"assert len(await producer.partitions_for(\\\"some_topic\\\")) == 5\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2ab634be\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaProducer._partition)\\n\",\n    \"def _partition(\\n\",\n    \"    self: InMemoryProducer,\\n\",\n    \"    topic: str,\\n\",\n    \"    arg1: Any,\\n\",\n    \"    arg2: Any,\\n\",\n    \"    arg3: Any,\\n\",\n    \"    key: bytes,\\n\",\n    \"    arg4: Any,\\n\",\n    \") -> int:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Determine the partition to which the message should be sent.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The topic to send the message to.\\n\",\n    \"        arg1, arg2, arg3, arg4: Additional arguments passed to the original AIOKafkaProducer._partition().\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The partition ID.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return int(hashlib.sha256(key).hexdigest(), 16) % self.broker.num_partitions\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2038adc6\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    
\"broker = InMemoryBroker(num_partitions=5)\\n\",\n    \"\\n\",\n    \"ProducerClass = InMemoryProducer(broker)\\n\",\n    \"producer = ProducerClass()\\n\",\n    \"\\n\",\n    \"partition = producer._partition(\\\"my_topic\\\", None, None, None, b\\\"key\\\", None)\\n\",\n    \"assert partition >= 0 and partition < 5\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c4fcacc5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class MockBatch:\\n\",\n    \"    def __init__(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initialize an instance of MockBatch.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self._batch: List[Tuple] = list()\\n\",\n    \"\\n\",\n    \"    def append(  # type: ignore\\n\",\n    \"        self, key: Optional[bytes], value: bytes, timestamp: int\\n\",\n    \"    ) -> RecordMetadata:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Append a message to the batch.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            key: The key associated with the message (optional).\\n\",\n    \"            value: The value of the message.\\n\",\n    \"            timestamp: The timestamp of the message.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The RecordMetadata of the appended message.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self._batch.append((key, value))\\n\",\n    \"        return RecordMetadata(\\n\",\n    \"            topic=\\\"\\\",\\n\",\n    \"            partition=0,\\n\",\n    \"            topic_partition=None,\\n\",\n    \"            offset=0,\\n\",\n    \"            timestamp=timestamp,\\n\",\n    \"            timestamp_type=0,\\n\",\n    \"            log_start_offset=0,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaProducer.create_batch)\\n\",\n    \"def create_batch(self: 
InMemoryProducer) -> \\\"MockBatch\\\":\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Create a mock batch for the in-memory producer.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A MockBatch instance.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return MockBatch()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(AIOKafkaProducer.send_batch)\\n\",\n    \"async def send_batch(\\n\",\n    \"    self: InMemoryProducer, batch: \\\"MockBatch\\\", topic: str, partition: Any\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Send a batch of messages to the specified topic and partition.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        batch: The MockBatch containing the messages to send.\\n\",\n    \"        topic: The topic to send the batch of messages to.\\n\",\n    \"        partition: The partition to send the batch of messages to.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    for record in batch._batch:\\n\",\n    \"        self.broker.write(\\n\",\n    \"            bootstrap_server=self._bootstrap_servers,\\n\",\n    \"            topic=topic,\\n\",\n    \"            value=record[1],\\n\",\n    \"            key=record[0],\\n\",\n    \"            partition=partition,\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"62c9f00b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:05:14.471 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:05:14.472 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:05:14.473 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:05:14.473 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['my_topic']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"bootstrap_server\\n\",\n    
\"\\n\",\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ProducerClass = InMemoryProducer(broker)\\n\",\n    \"producer = ProducerClass()\\n\",\n    \"\\n\",\n    \"await producer.start()\\n\",\n    \"\\n\",\n    \"batch = producer.create_batch()\\n\",\n    \"batch.append(b\\\"key\\\", b\\\"value\\\", 1)\\n\",\n    \"\\n\",\n    \"partition = producer._partition(\\\"my_topic\\\", None, None, None, b\\\"key\\\", None)\\n\",\n    \"await producer.send_batch(batch, topic, partition=partition)\\n\",\n    \"\\n\",\n    \"ConsumerClass = InMemoryConsumer(broker)\\n\",\n    \"consumer = ConsumerClass(auto_offset_reset=\\\"earliest\\\")\\n\",\n    \"\\n\",\n    \"await consumer.start()\\n\",\n    \"\\n\",\n    \"consumer.subscribe([\\\"my_topic\\\"])\\n\",\n    \"msgs = await consumer.getmany()\\n\",\n    \"assert len(msgs[TopicPartition(topic='my_topic', partition=0)]) == 1\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3c37ae80\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:05:17.175 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:05:17.176 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:05:17.176 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:05:17.177 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['my_topic']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = InMemoryBroker()\\n\",\n    \"\\n\",\n    \"ProducerClass = InMemoryProducer(broker)\\n\",\n    \"producer = ProducerClass()\\n\",\n    \"\\n\",\n    \"await producer.start()\\n\",\n    \"\\n\",\n    \"batch = producer.create_batch()\\n\",\n    \"batch.append(b\\\"key\\\", b\\\"value\\\", 1)\\n\",\n    \"\\n\",\n    \"partitions = await producer.partitions_for(\\\"my_topic\\\")\\n\",\n    \"partition = 
random.choice(tuple(partitions))\\n\",\n    \"\\n\",\n    \"await producer.send_batch(batch, topic, partition=partition)\\n\",\n    \"\\n\",\n    \"ConsumerClass = InMemoryConsumer(broker)\\n\",\n    \"consumer = ConsumerClass(auto_offset_reset=\\\"earliest\\\")\\n\",\n    \"\\n\",\n    \"await consumer.start()\\n\",\n    \"\\n\",\n    \"consumer.subscribe([\\\"my_topic\\\"])\\n\",\n    \"msgs = await consumer.getmany()\\n\",\n    \"assert len(msgs[TopicPartition(topic='my_topic', partition=0)]) == 1\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"20e4d12b\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Add patching to InMemoryBroker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"446e0a24\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@contextmanager\\n\",\n    \"def lifecycle(self: InMemoryBroker) -> Iterator[InMemoryBroker]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Context manager for the lifecycle of the in-memory broker.\\n\",\n    \"\\n\",\n    \"    Yields:\\n\",\n    \"        An instance of the in-memory broker.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(\\n\",\n    \"        \\\"InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\\"\\n\",\n    \"    )\\n\",\n    \"    try:\\n\",\n    \"        logger.info(\\\"InMemoryBroker starting\\\")\\n\",\n    \"        \\n\",\n    \"        old_consumer = fastkafka._aiokafka_imports.AIOKafkaConsumer\\n\",\n    \"        old_producer = fastkafka._aiokafka_imports.AIOKafkaProducer\\n\",\n    \"        \\n\",\n    \"        fastkafka._aiokafka_imports.AIOKafkaConsumer = InMemoryConsumer(self)\\n\",\n    \"        fastkafka._aiokafka_imports.AIOKafkaProducer = InMemoryProducer(self)\\n\",\n    \"\\n\",\n    \"        self.is_started = True\\n\",\n    \"        yield self\\n\",\n    \"    finally:\\n\",\n   
 \"        logger.info(\\\"InMemoryBroker stopping\\\")\\n\",\n    \"\\n\",\n    \"        fastkafka._aiokafka_imports.AIOKafkaConsumer = old_consumer\\n\",\n    \"        fastkafka._aiokafka_imports.AIOKafkaProducer = old_producer\\n\",\n    \"\\n\",\n    \"        self.is_started = False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dc55ccb2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:05:22.344 [INFO] __main__: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-26 08:05:22.345 [INFO] __main__: InMemoryBroker starting\\n\",\n      \"23-06-26 08:05:22.345 [INFO] __main__: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"assert fastkafka._aiokafka_imports.AIOKafkaConsumer == AIOKafkaConsumer\\n\",\n    \"assert fastkafka._aiokafka_imports.AIOKafkaProducer == AIOKafkaProducer\\n\",\n    \"\\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    assert isinstance(fastkafka._aiokafka_imports.AIOKafkaConsumer, InMemoryConsumer)\\n\",\n    \"    assert isinstance(fastkafka._aiokafka_imports.AIOKafkaProducer, InMemoryProducer)\\n\",\n    \"    assert fastkafka._aiokafka_imports.AIOKafkaConsumer().broker == broker\\n\",\n    \"    assert fastkafka._aiokafka_imports.AIOKafkaProducer().broker == broker\\n\",\n    \"\\n\",\n    \"assert fastkafka._aiokafka_imports.AIOKafkaConsumer == AIOKafkaConsumer\\n\",\n    \"assert fastkafka._aiokafka_imports.AIOKafkaProducer == AIOKafkaProducer\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"5335aea0\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Broker, consumer and producer integration tests\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4275bf97\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    
\"@asynccontextmanager\\n\",\n    \"async def create_consumer_and_producer(\\n\",\n    \"    auto_offset_reset: str = \\\"latest\\\",\\n\",\n    \") -> AsyncIterator[Tuple[AIOKafkaConsumer, AIOKafkaProducer]]:\\n\",\n    \"    consumer = fastkafka._aiokafka_imports.AIOKafkaConsumer(\\n\",\n    \"        auto_offset_reset=auto_offset_reset\\n\",\n    \"    )\\n\",\n    \"    producer = fastkafka._aiokafka_imports.AIOKafkaProducer()\\n\",\n    \"\\n\",\n    \"    await consumer.start()\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    yield (consumer, producer)\\n\",\n    \"\\n\",\n    \"    await consumer.stop()\\n\",\n    \"    await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0a7688d2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def checkEqual(L1, L2):\\n\",\n    \"    return len(L1) == len(L2) and sorted(L1) == sorted(L2)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"800d6a47\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert checkEqual([1, 2], [3]) == False\\n\",\n    \"assert checkEqual([1, 2, 3], [3, 2, 1]) == True\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"ebc21a6e\",\n   \"metadata\": {},\n   \"source\": [\n    \"Sanity check, let's see if the messages are sent to broker and received by the consumer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f90249e0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:06:03.590 [INFO] __main__: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-26 08:06:03.591 [INFO] __main__: InMemoryBroker starting\\n\",\n      \"23-06-26 08:06:03.592 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 
08:06:03.592 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:06:03.595 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:06:03.595 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:06:03.596 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:06:03.597 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-26 08:06:03.598 [INFO] __main__: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topic = \\\"test_topic\\\"\\n\",\n    \"sent_msgs = [f\\\"msg{i}\\\".encode(\\\"UTF-8\\\") for i in range(320)]\\n\",\n    \"\\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    async with create_consumer_and_producer(auto_offset_reset=\\\"earliest\\\") as (\\n\",\n    \"        consumer,\\n\",\n    \"        producer,\\n\",\n    \"    ):\\n\",\n    \"        [await producer.send(topic, msg) for msg in sent_msgs]\\n\",\n    \"        consumer.subscribe([topic])\\n\",\n    \"        received = await consumer.getmany()\\n\",\n    \"        received_msgs = [msg.value for _, msgs in received.items() for msg in msgs]\\n\",\n    \"    assert checkEqual(\\n\",\n    \"        received_msgs, sent_msgs\\n\",\n    \"    ), f\\\"{sent_msgs=}\\\\n{received_msgs=}\\\\n{data=}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f40fa9ed\",\n   \"metadata\": {},\n   \"source\": [\n    \"Check if only subscribed topic messages are received by the consumer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"839a6755\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:06:11.830 [INFO] __main__: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-26 08:06:11.831 [INFO] 
__main__: InMemoryBroker starting\\n\",\n      \"23-06-26 08:06:11.832 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:06:11.832 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:06:11.833 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:06:11.833 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic1']\\n\",\n      \"23-06-26 08:06:11.834 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:06:11.834 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-26 08:06:11.834 [INFO] __main__: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topic1 = \\\"test_topic1\\\"\\n\",\n    \"topic2 = \\\"test_topic2\\\"\\n\",\n    \"sent_msgs_1 = [(f\\\"msg{i}\\\" + topic1).encode(\\\"UTF-8\\\") for i in range(32)]\\n\",\n    \"sent_msgs_2 = [(f\\\"msg{i}\\\" + topic2).encode(\\\"UTF-8\\\") for i in range(32)]\\n\",\n    \"\\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    async with create_consumer_and_producer(auto_offset_reset=\\\"earliest\\\") as (\\n\",\n    \"        consumer,\\n\",\n    \"        producer,\\n\",\n    \"    ):\\n\",\n    \"        [await producer.send(topic1, msg) for msg in sent_msgs_1]\\n\",\n    \"        [await producer.send(topic2, msg) for msg in sent_msgs_2]\\n\",\n    \"\\n\",\n    \"        consumer.subscribe([topic1])\\n\",\n    \"        received = await consumer.getmany()\\n\",\n    \"        received_msgs = [msg.value for _, msgs in received.items() for msg in msgs]\\n\",\n    \"\\n\",\n    \"    assert checkEqual(sent_msgs_1, received_msgs)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9bb1c5c5\",\n   \"metadata\": {},\n   \"source\": [\n    \"Check if msgs are received only after subscribing when auto_offset_reset is set to \\\"latest\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"ed6bba51\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:06:17.222 [INFO] __main__: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-26 08:06:17.223 [INFO] __main__: InMemoryBroker starting\\n\",\n      \"23-06-26 08:06:17.223 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:06:17.224 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:06:17.224 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:06:17.225 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:06:17.226 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:06:17.226 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-26 08:06:17.227 [INFO] __main__: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topic = \\\"test_topic\\\"\\n\",\n    \"sent_msgs_before = [f\\\"msg{i}\\\".encode(\\\"UTF-8\\\") for i in range(32)]\\n\",\n    \"sent_msgs_after = [f\\\"msg{i}\\\".encode(\\\"UTF-8\\\") for i in range(32, 64)]\\n\",\n    \"\\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    async with create_consumer_and_producer() as (consumer, producer):\\n\",\n    \"        [await producer.send(topic, msg) for msg in sent_msgs_before]\\n\",\n    \"\\n\",\n    \"        consumer.subscribe([topic])\\n\",\n    \"        received = await consumer.getmany()\\n\",\n    \"        [await producer.send(topic, msg) for msg in sent_msgs_after]\\n\",\n    \"        received = await consumer.getmany()\\n\",\n    \"        received_msgs = [msg.value for _, msgs in received.items() for msg in msgs]\\n\",\n    \"\\n\",\n    \"    assert checkEqual(sent_msgs_after, received_msgs)\"\n   ]\n  
},\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"83e9f43a\",\n   \"metadata\": {},\n   \"source\": [\n    \"Check two consumers different groups\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2db9c2ff\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:06:42.343 [INFO] __main__: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-26 08:06:42.344 [INFO] __main__: InMemoryBroker starting\\n\",\n      \"23-06-26 08:06:42.345 [ERROR] asyncio: Unclosed AIOKafkaConsumer\\n\",\n      \"consumer: <aiokafka.consumer.consumer.AIOKafkaConsumer object>\\n\",\n      \"23-06-26 08:06:42.345 [ERROR] asyncio: Unclosed AIOKafkaProducer\\n\",\n      \"producer: <aiokafka.producer.producer.AIOKafkaProducer object>\\n\",\n      \"23-06-26 08:06:42.345 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:06:42.346 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:06:42.346 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:06:42.347 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:06:42.347 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:06:42.347 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:06:42.348 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:06:42.349 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:06:42.350 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:06:42.350 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-26 08:06:42.351 [INFO] __main__: InMemoryBroker stopping\\n\"\n     ]\n    
}\n   ],\n   \"source\": [\n    \"topic = \\\"test_topic\\\"\\n\",\n    \"sent_msgs = [f\\\"msg{i}\\\".encode(\\\"UTF-8\\\") for i in range(32)]\\n\",\n    \"\\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    consumer1 = fastkafka._aiokafka_imports.AIOKafkaConsumer(\\n\",\n    \"        auto_offset_reset=\\\"earliest\\\"\\n\",\n    \"    )\\n\",\n    \"    consumer2 = fastkafka._aiokafka_imports.AIOKafkaConsumer(\\n\",\n    \"        auto_offset_reset=\\\"earliest\\\"\\n\",\n    \"    )\\n\",\n    \"    producer = fastkafka._aiokafka_imports.AIOKafkaProducer()\\n\",\n    \"\\n\",\n    \"    await consumer1.start()\\n\",\n    \"    await consumer2.start()\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    [await producer.send(topic, msg) for msg in sent_msgs]\\n\",\n    \"\\n\",\n    \"    consumer1.subscribe([topic])\\n\",\n    \"    received1 = await consumer1.getmany()\\n\",\n    \"\\n\",\n    \"    consumer2.subscribe([topic])\\n\",\n    \"    received2 = await consumer2.getmany()\\n\",\n    \"\\n\",\n    \"    received_msgs1 = [msg.value for _, msgs in received1.items() for msg in msgs]\\n\",\n    \"    received_msgs2 = [msg.value for _, msgs in received2.items() for msg in msgs]\\n\",\n    \"\\n\",\n    \"    await consumer1.stop()\\n\",\n    \"    await consumer2.stop()\\n\",\n    \"    await producer.stop()\\n\",\n    \"\\n\",\n    \"    assert checkEqual(sent_msgs, received_msgs1), received_msgs1\\n\",\n    \"    assert checkEqual(sent_msgs, received_msgs2)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1687740a\",\n   \"metadata\": {},\n   \"source\": [\n    \"Check two consumers same group\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5c7ed5e2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:07:00.711 [INFO] __main__: 
InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-26 08:07:00.711 [INFO] __main__: InMemoryBroker starting\\n\",\n      \"23-06-26 08:07:00.712 [ERROR] asyncio: Unclosed AIOKafkaConsumer\\n\",\n      \"consumer: <aiokafka.consumer.consumer.AIOKafkaConsumer object>\\n\",\n      \"23-06-26 08:07:00.712 [ERROR] asyncio: Unclosed AIOKafkaProducer\\n\",\n      \"producer: <aiokafka.producer.producer.AIOKafkaProducer object>\\n\",\n      \"23-06-26 08:07:00.713 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:07:00.713 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:07:00.713 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:07:00.714 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:07:00.714 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:07:00.715 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:07:00.715 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:07:00.715 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:07:00.716 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:07:00.716 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-26 08:07:00.716 [INFO] __main__: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topic = \\\"test_topic\\\"\\n\",\n    \"sent_msgs = [f\\\"msg{i}\\\".encode(\\\"UTF-8\\\") for i in range(32)]\\n\",\n    \"\\n\",\n    \"with InMemoryBroker(num_partitions=5) as broker:\\n\",\n    \"    consumer1 = fastkafka._aiokafka_imports.AIOKafkaConsumer(\\n\",\n    \"        group_id=\\\"my_group\\\", auto_offset_reset=\\\"earliest\\\"\\n\",\n    \"    )\\n\",\n    \"    consumer2 = 
fastkafka._aiokafka_imports.AIOKafkaConsumer(\\n\",\n    \"        group_id=\\\"my_group\\\", auto_offset_reset=\\\"earliest\\\"\\n\",\n    \"    )\\n\",\n    \"    producer = fastkafka._aiokafka_imports.AIOKafkaProducer()\\n\",\n    \"\\n\",\n    \"    await consumer1.start()\\n\",\n    \"    await consumer2.start()\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    [await producer.send(topic, msg) for msg in sent_msgs]\\n\",\n    \"\\n\",\n    \"    consumer1.subscribe([topic])\\n\",\n    \"    consumer2.subscribe([topic])\\n\",\n    \"\\n\",\n    \"    received1 = await consumer1.getmany()\\n\",\n    \"    received2 = await consumer2.getmany()\\n\",\n    \"\\n\",\n    \"    received_msgs1 = [msg.value for _, msgs in received1.items() for msg in msgs]\\n\",\n    \"    received_msgs2 = [msg.value for _, msgs in received2.items() for msg in msgs]\\n\",\n    \"\\n\",\n    \"    await consumer1.stop()\\n\",\n    \"    await consumer2.stop()\\n\",\n    \"    await producer.stop()\\n\",\n    \"\\n\",\n    \"    assert checkEqual(sent_msgs, received_msgs1 + received_msgs2)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"470875ee\",\n   \"metadata\": {},\n   \"source\": [\n    \"Check for different bootstrap servers\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7e046307\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-26 08:07:14.565 [INFO] __main__: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-26 08:07:14.566 [INFO] __main__: InMemoryBroker starting\\n\",\n      \"23-06-26 08:07:14.567 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:07:14.567 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:07:14.568 [INFO] __main__: AIOKafkaConsumer patched subscribe() 
called\\n\",\n      \"23-06-26 08:07:14.569 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:07:14.570 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:07:14.570 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-26 08:07:14.570 [INFO] __main__: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-26 08:07:14.571 [INFO] __main__: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-26 08:07:14.572 [INFO] __main__: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-26 08:07:14.572 [INFO] __main__: AIOKafkaConsumer.subscribe(), subscribing to: ['test_topic']\\n\",\n      \"23-06-26 08:07:14.572 [INFO] __main__: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-26 08:07:14.573 [INFO] __main__: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-26 08:07:14.573 [INFO] __main__: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topic = \\\"test_topic\\\"\\n\",\n    \"sent_msgs = [f\\\"msg{i}\\\".encode(\\\"UTF-8\\\") for i in range(32)]\\n\",\n    \"\\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    for server in [\\\"localhost:9092\\\", \\\"kafka.airt.ai\\\"]:\\n\",\n    \"        consumer = fastkafka._aiokafka_imports.AIOKafkaConsumer(\\n\",\n    \"            bootstrap_servers=server, auto_offset_reset=\\\"earliest\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        producer = fastkafka._aiokafka_imports.AIOKafkaProducer(\\n\",\n    \"            bootstrap_servers=server\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        await consumer.start()\\n\",\n    \"        await producer.start()\\n\",\n    \"\\n\",\n    \"        [await producer.send(topic, msg) for msg in sent_msgs]\\n\",\n    \"\\n\",\n    \"        consumer.subscribe([topic])\\n\",\n    \"        received = await consumer.getmany()\\n\",\n    \"\\n\",\n    \"        received_msgs = 
[msg.value for _, msgs in received.items() for msg in msgs]\\n\",\n    \"\\n\",\n    \"        await consumer.stop()\\n\",\n    \"        await producer.stop()\\n\",\n    \"\\n\",\n    \"        assert checkEqual(sent_msgs, received_msgs)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d1f760f8\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/002_ApacheKafkaBroker.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"76c520c1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _testing.apache_kafka_broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"38d47b16\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import re\\n\",\n    \"import platform\\n\",\n    \"import socket\\n\",\n    \"import subprocess  # nosec\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"from datetime import datetime, timedelta\\n\",\n    \"from os import environ\\n\",\n    \"from pathlib import Path\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"import nest_asyncio\\n\",\n    \"\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._components.helpers import in_notebook\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import delegates, export, filter_using_signature, patch\\n\",\n    \"from fastkafka._components.test_dependencies import check_java, check_kafka\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c74ed82b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import shlex\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"\\n\",\n    \"import pytest\\n\",\n    \"\\n\",\n    \"from fastkafka._aiokafka_imports import AIOKafkaConsumer, AIOKafkaProducer\\n\",\n    \"from fastkafka._components.helpers import change_dir\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b81062e5\",\n   \"metadata\": {},\n 
  \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"if in_notebook():\\n\",\n    \"    from tqdm.notebook import tqdm\\n\",\n    \"else:\\n\",\n    \"    from tqdm import tqdm\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"49f95ea6\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"687ef020\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"5fded319\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Local Kafka\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d552eb74\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Kafka and zookeeper config helpers\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d1fa44d1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_zookeeper_config_string(\\n\",\n    \"    data_dir: Union[str, Path],  # the directory where the snapshot is stored.\\n\",\n    \"    zookeeper_port: int = 2181,  # the port at which the clients will connect\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Generates a zookeeeper configuration string that can be exported to file\\n\",\n    \"    and used to start a zookeeper instance.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        data_dir: Path to the directory where the zookeepeer instance will save data\\n\",\n    \"        zookeeper_port: Port for clients (Kafka 
brokes) to connect\\n\",\n    \"    Returns:\\n\",\n    \"        Zookeeper configuration string.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    zookeeper_data_dir = str((Path(data_dir) / \\\"zookeeper\\\").resolve())\\n\",\n    \"    if platform.system() == \\\"Windows\\\":\\n\",\n    \"        zookeeper_data_dir = zookeeper_data_dir.replace(\\\"\\\\\\\\\\\", \\\"/\\\")\\n\",\n    \"    zookeeper_config = f\\\"\\\"\\\"dataDir={zookeeper_data_dir}\\n\",\n    \"clientPort={zookeeper_port}\\n\",\n    \"maxClientCnxns=0\\n\",\n    \"admin.enableServer=false\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    return zookeeper_config\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"34bbd0b5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"p = Path(\\\"..\\\").resolve()\\n\",\n    \"data_dir = str(p).replace(\\\"\\\\\\\\\\\", \\\"/\\\") if platform.system() == \\\"Windows\\\" else str(p)\\n\",\n    \"assert (\\n\",\n    \"    get_zookeeper_config_string(data_dir=\\\"..\\\")\\n\",\n    \"    == f\\\"\\\"\\\"dataDir={data_dir}/zookeeper\\n\",\n    \"clientPort=2181\\n\",\n    \"maxClientCnxns=0\\n\",\n    \"admin.enableServer=false\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    get_zookeeper_config_string(data_dir=\\\"..\\\", zookeeper_port=100)\\n\",\n    \"    == f\\\"\\\"\\\"dataDir={data_dir}/zookeeper\\n\",\n    \"clientPort=100\\n\",\n    \"maxClientCnxns=0\\n\",\n    \"admin.enableServer=false\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"42d393bc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_kafka_config_string(\\n\",\n    \"    data_dir: Union[str, Path], zookeeper_port: int = 2181, listener_port: int = 9092\\n\",\n    \") -> str:\\n\",\n    \"    
\\\"\\\"\\\"Generates a kafka broker configuration string that can be exported to file\\n\",\n    \"    and used to start a kafka broker instance.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        data_dir: Path to the directory where the kafka broker instance will save data\\n\",\n    \"        zookeeper_port: Port on which the zookeeper instance is running\\n\",\n    \"        listener_port: Port on which the clients (producers and consumers) can connect\\n\",\n    \"    Returns:\\n\",\n    \"        Kafka broker configuration string.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    kafka_logs_dir = str((Path(data_dir) / \\\"kafka_logs\\\").resolve())\\n\",\n    \"    if platform.system() == \\\"Windows\\\":\\n\",\n    \"        kafka_logs_dir = kafka_logs_dir.replace(\\\"\\\\\\\\\\\", \\\"/\\\")\\n\",\n    \"    kafka_config = f\\\"\\\"\\\"broker.id=0\\n\",\n    \"\\n\",\n    \"############################# Socket Server Settings #############################\\n\",\n    \"\\n\",\n    \"# The address the socket server listens on. If not configured, the host name will be equal to the value of\\n\",\n    \"# java.net.InetAddress.getCanonicalHostName(), with PLAINTEXT listener name, and port 9092.\\n\",\n    \"#   FORMAT:\\n\",\n    \"#     listeners = listener_name://host_name:port\\n\",\n    \"#   EXAMPLE:\\n\",\n    \"#     listeners = PLAINTEXT://your.host.name:9092\\n\",\n    \"listeners=PLAINTEXT://:{listener_port}\\n\",\n    \"\\n\",\n    \"# Listener name, hostname and port the broker will advertise to clients.\\n\",\n    \"# If not set, it uses the value for \\\"listeners\\\".\\n\",\n    \"# advertised.listeners=PLAINTEXT://localhost:{listener_port}\\n\",\n    \"\\n\",\n    \"# Maps listener names to security protocols, the default is for them to be the same. 
See the config documentation for more details\\n\",\n    \"#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL\\n\",\n    \"\\n\",\n    \"# The number of threads that the server uses for receiving requests from the network and sending responses to the network\\n\",\n    \"num.network.threads=3\\n\",\n    \"\\n\",\n    \"# The number of threads that the server uses for processing requests, which may include disk I/O\\n\",\n    \"num.io.threads=8\\n\",\n    \"\\n\",\n    \"# The send buffer (SO_SNDBUF) used by the socket server\\n\",\n    \"socket.send.buffer.bytes=102400\\n\",\n    \"\\n\",\n    \"# The receive buffer (SO_RCVBUF) used by the socket server\\n\",\n    \"socket.receive.buffer.bytes=102400\\n\",\n    \"\\n\",\n    \"# The maximum size of a request that the socket server will accept (protection against OOM)\\n\",\n    \"socket.request.max.bytes=104857600\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"############################# Log Basics #############################\\n\",\n    \"\\n\",\n    \"# A comma separated list of directories under which to store log files\\n\",\n    \"log.dirs={kafka_logs_dir}\\n\",\n    \"\\n\",\n    \"# The default number of log partitions per topic. 
More partitions allow greater\\n\",\n    \"# parallelism for consumption, but this will also result in more files across\\n\",\n    \"# the brokers.\\n\",\n    \"num.partitions=1\\n\",\n    \"\\n\",\n    \"# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown.\\n\",\n    \"# This value is recommended to be increased for installations with data dirs located in RAID array.\\n\",\n    \"num.recovery.threads.per.data.dir=1\\n\",\n    \"\\n\",\n    \"offsets.topic.replication.factor=1\\n\",\n    \"transaction.state.log.replication.factor=1\\n\",\n    \"transaction.state.log.min.isr=1\\n\",\n    \"\\n\",\n    \"# The number of messages to accept before forcing a flush of data to disk\\n\",\n    \"log.flush.interval.messages=10000\\n\",\n    \"\\n\",\n    \"# The maximum amount of time a message can sit in a log before we force a flush\\n\",\n    \"log.flush.interval.ms=1000\\n\",\n    \"\\n\",\n    \"# The minimum age of a log file to be eligible for deletion due to age\\n\",\n    \"log.retention.hours=168\\n\",\n    \"\\n\",\n    \"# A size-based retention policy for logs. Segments are pruned from the log unless the remaining\\n\",\n    \"# segments drop below log.retention.bytes. Functions independently of log.retention.hours.\\n\",\n    \"log.retention.bytes=1073741824\\n\",\n    \"\\n\",\n    \"# The maximum size of a log segment file. 
When this size is reached a new log segment will be created.\\n\",\n    \"log.segment.bytes=1073741824\\n\",\n    \"\\n\",\n    \"# The interval at which log segments are checked to see if they can be deleted according to the retention policies\\n\",\n    \"log.retention.check.interval.ms=300000\\n\",\n    \"\\n\",\n    \"# Zookeeper connection string (see zookeeper docs for details).\\n\",\n    \"zookeeper.connect=localhost:{zookeeper_port}\\n\",\n    \"\\n\",\n    \"# Timeout in ms for connecting to zookeeper\\n\",\n    \"zookeeper.connection.timeout.ms=18000\\n\",\n    \"\\n\",\n    \"# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance.\\n\",\n    \"group.initial.rebalance.delay.ms=0\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    return kafka_config\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2582427a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"p = Path(\\\"..\\\").resolve()\\n\",\n    \"data_dir = str(p).replace(\\\"\\\\\\\\\\\", \\\"/\\\") if platform.system() == \\\"Windows\\\" else str(p)\\n\",\n    \"actual = get_kafka_config_string(data_dir=\\\"..\\\", listener_port=9999)\\n\",\n    \"assert f\\\"log.dirs={data_dir}/kafka_logs\\\" in actual\\n\",\n    \"assert \\\"listeners=PLAINTEXT://:9999\\\" in actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"873f5b20\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.testing\\\")\\n\",\n    \"class ApacheKafkaBroker:\\n\",\n    \"    \\\"\\\"\\\"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    @delegates(get_kafka_config_string, but=[\\\"data_dir\\\"])\\n\",\n    \"    @delegates(get_zookeeper_config_string, keep=True, 
but=[\\\"data_dir\\\"])\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        topics: Iterable[str] = [],\\n\",\n    \"        *,\\n\",\n    \"        retries: int = 3,\\n\",\n    \"        apply_nest_asyncio: bool = False,\\n\",\n    \"        **kwargs: Dict[str, Any],\\n\",\n    \"    ):\\n\",\n    \"        \\\"\\\"\\\"Initialises the ApacheKafkaBroker object\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            topics: List of topics to create after sucessfull Kafka broker startup\\n\",\n    \"            retries: Number of retries to create kafka and zookeeper services using random\\n\",\n    \"            apply_nest_asyncio: set to True if running in notebook\\n\",\n    \"            zookeeper_port: Port for clients (Kafka brokes) to connect\\n\",\n    \"            listener_port: Port on which the clients (producers and consumers) can connect\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.zookeeper_kwargs = filter_using_signature(\\n\",\n    \"            get_zookeeper_config_string, **kwargs\\n\",\n    \"        )\\n\",\n    \"        self.kafka_kwargs = filter_using_signature(get_kafka_config_string, **kwargs)\\n\",\n    \"\\n\",\n    \"        if \\\"zookeeper_port\\\" not in self.zookeeper_kwargs:\\n\",\n    \"            self.zookeeper_kwargs[\\\"zookeeper_port\\\"] = 2181\\n\",\n    \"            self.kafka_kwargs[\\\"zookeeper_port\\\"] = 2181\\n\",\n    \"\\n\",\n    \"        if \\\"listener_port\\\" not in self.kafka_kwargs:\\n\",\n    \"            self.kafka_kwargs[\\\"listener_port\\\"] = 9092\\n\",\n    \"\\n\",\n    \"        self.retries = retries\\n\",\n    \"        self.apply_nest_asyncio = apply_nest_asyncio\\n\",\n    \"        self.temporary_directory: Optional[TemporaryDirectory] = None\\n\",\n    \"        self.temporary_directory_path: Optional[Path] = None\\n\",\n    \"        self.kafka_task: Optional[asyncio.subprocess.Process] = None\\n\",\n    \"        self.zookeeper_task: 
Optional[asyncio.subprocess.Process] = None\\n\",\n    \"        self._is_started = False\\n\",\n    \"        self.topics: Iterable[str] = topics\\n\",\n    \"\\n\",\n    \"    @property\\n\",\n    \"    def is_started(self) -> bool:\\n\",\n    \"        \\\"\\\"\\\"Property indicating whether the ApacheKafkaBroker object is started.\\n\",\n    \"\\n\",\n    \"        The is_started property indicates if the ApacheKafkaBroker object is currently\\n\",\n    \"        in a started state. This implies that Zookeeper and Kafka broker processes have\\n\",\n    \"        sucesfully started and are ready for handling events.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            bool: True if the object is started, False otherwise.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return self._is_started\\n\",\n    \"\\n\",\n    \"    @classmethod\\n\",\n    \"    def _check_deps(cls) -> None:\\n\",\n    \"        \\\"\\\"\\\"Prepares the environment for running Kafka brokers.\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _start(self) -> str:\\n\",\n    \"        \\\"\\\"\\\"Starts a local kafka broker and zookeeper instance asynchronously\\n\",\n    \"        Returns:\\n\",\n    \"           Kafka broker bootstrap server address in string format: add:port\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def start(self) -> str:\\n\",\n    \"        \\\"\\\"\\\"Starts a local kafka broker and zookeeper instance synchronously\\n\",\n    \"        Returns:\\n\",\n    \"           Kafka broker bootstrap server address in string format: add:port\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def stop(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Stops a local kafka broker and zookeeper instance 
synchronously\\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _stop(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Stops a local kafka broker and zookeeper instance synchronously\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def get_service_config_string(self, service: str, *, data_dir: Path) -> str:\\n\",\n    \"        \\\"\\\"\\\"Generates a configuration for a service\\n\",\n    \"        Args:\\n\",\n    \"            data_dir: Path to the directory where the zookeepeer instance will save data\\n\",\n    \"            service: \\\"kafka\\\" or \\\"zookeeper\\\", defines which service to get config string for\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _start_service(self, service: str = \\\"kafka\\\") -> None:\\n\",\n    \"        \\\"\\\"\\\"Starts the service according to defined service var\\n\",\n    \"        Args:\\n\",\n    \"            service: \\\"kafka\\\" or \\\"zookeeper\\\", defines which service to start\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _start_zookeeper(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Start a local zookeeper instance\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _start_kafka(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Start a local kafka broker\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _create_topics(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Create missing topics in local Kafka broker\\n\",\n    \"        
Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def __enter__(self) -> str:\\n\",\n    \"        #         ApacheKafkaBroker._check_deps()\\n\",\n    \"        return self.start()\\n\",\n    \"\\n\",\n    \"    def __exit__(self, *args: Any, **kwargs: Any) -> None:\\n\",\n    \"        self.stop()\\n\",\n    \"\\n\",\n    \"    async def __aenter__(self) -> str:\\n\",\n    \"        #         ApacheKafkaBroker._check_deps()\\n\",\n    \"        return await self._start()\\n\",\n    \"\\n\",\n    \"    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\\n\",\n    \"        await self._stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6cbd5808\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# print(combine_params(combine_params(ApacheKafkaBroker, get_kafka_config_string), get_zookeeper_config_string).__doc__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"167099d2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch(cls_method=True)  # type: ignore\\n\",\n    \"def _check_deps(cls: ApacheKafkaBroker) -> None:\\n\",\n    \"    \\\"\\\"\\\"Checks the dependencies required to run Apache KafkaBroker.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If JDK installation or Kafka installation is not found.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if not check_java():\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            \\\"JDK installation not found! Please install JDK manually or run 'fastkafka testing install_deps'.\\\"\\n\",\n    \"        )\\n\",\n    \"    if not check_kafka():\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            \\\"Kafka installation not found! 
Please install Kafka tools manually or run 'fastkafka testing install_deps'.\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b4c383b8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"check_kafka()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d4600ad5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# TODO: test\\n\",\n    \"\\n\",\n    \"broker = ApacheKafkaBroker()\\n\",\n    \"broker._check_deps()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e5d74671\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def run_and_match(\\n\",\n    \"    *args: str,\\n\",\n    \"    capture: str = \\\"stdout\\\",\\n\",\n    \"    timeout: int = 5,\\n\",\n    \"    pattern: str,\\n\",\n    \"    num_to_match: int = 1,\\n\",\n    \") -> asyncio.subprocess.Process:\\n\",\n    \"    \\\"\\\"\\\"Runs a command asynchronously and matches the output 
against a pattern.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        *args: Command-line arguments for the subprocess.\\n\",\n    \"        capture: Which output to capture (\\\"stdout\\\" or \\\"stderr\\\").\\n\",\n    \"        timeout: Timeout in seconds for reading the output.\\n\",\n    \"        pattern: Regular expression pattern to match in the output.\\n\",\n    \"        num_to_match: Number of matches to wait for.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The subprocess process object.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If the capture parameter has an unsupported value.\\n\",\n    \"        TimeoutError: If the process times out.\\n\",\n    \"        RuntimeError: If the process returns a non-zero return code.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    # Create the subprocess; redirect the standard output\\n\",\n    \"    # into a pipe.\\n\",\n    \"    matched = 0\\n\",\n    \"\\n\",\n    \"    if platform.system() == \\\"Windows\\\":\\n\",\n    \"        proc = await asyncio.create_subprocess_shell(\\n\",\n    \"            \\\" \\\".join(args),\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"            creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,  # type: ignore\\n\",\n    \"        )\\n\",\n    \"    else:\\n\",\n    \"        proc = await asyncio.create_subprocess_exec(\\n\",\n    \"            *args,\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    # Read one line of output.\\n\",\n    \"    t = datetime.now()\\n\",\n    \"    while datetime.now() - t < timedelta(seconds=timeout):\\n\",\n    \"        try:\\n\",\n    \"            if capture == \\\"stdout\\\":\\n\",\n    \"                data = await asyncio.wait_for(proc.stdout.readline(), timeout=1.0)  # type: ignore\\n\",\n    
\"            elif capture == \\\"stderr\\\":\\n\",\n    \"                data = await asyncio.wait_for(proc.stderr.readline(), timeout=1.0)  # type: ignore\\n\",\n    \"            else:\\n\",\n    \"                raise ValueError(\\n\",\n    \"                    f\\\"Unknown capture param value {capture}, supported values are 'stdout', 'stderr'\\\"\\n\",\n    \"                )\\n\",\n    \"            ddata = data.decode(\\\"utf-8\\\")\\n\",\n    \"\\n\",\n    \"            if len(re.findall(pattern, ddata)) > 0:\\n\",\n    \"                # print(f\\\"Matched: {ddata}\\\")\\n\",\n    \"                matched += 1\\n\",\n    \"                if matched == num_to_match:\\n\",\n    \"                    return proc\\n\",\n    \"        except asyncio.exceptions.TimeoutError as e:\\n\",\n    \"            pass\\n\",\n    \"\\n\",\n    \"        if proc.returncode is not None:\\n\",\n    \"            stdout, stderr = await proc.communicate()\\n\",\n    \"            dstdout = stdout.decode(\\\"utf-8\\\")\\n\",\n    \"            dstderr = stderr.decode(\\\"utf-8\\\")\\n\",\n    \"            if proc.returncode == 0:\\n\",\n    \"                raise TimeoutError(\\n\",\n    \"                    f\\\"stdout={dstdout}, stderr={dstderr}, returncode={proc.returncode}\\\"\\n\",\n    \"                )\\n\",\n    \"            else:\\n\",\n    \"                raise RuntimeError(\\n\",\n    \"                    f\\\"stdout={dstdout}, stderr={dstderr}, returncode={proc.returncode}\\\"\\n\",\n    \"                )\\n\",\n    \"\\n\",\n    \"    await terminate_asyncio_process(proc)\\n\",\n    \"\\n\",\n    \"    raise TimeoutError()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a682b7da\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the 
process 46756...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 46756 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"cmd = \\\"import datetime; from time import sleep; sleep(3); print('time is:' + str(datetime.datetime.now()))\\\"\\n\",\n    \"if platform.system() == \\\"Windows\\\":\\n\",\n    \"    cmd = \\\"\\\\\\\"import datetime; from time import sleep; sleep(3); print('time is:' + str(datetime.datetime.now()))\\\\\\\"\\\"\\n\",\n    \"\\n\",\n    \"with pytest.raises(TimeoutError):\\n\",\n    \"    proc = await run_and_match(\\\"python3\\\", \\\"-c\\\", cmd, pattern=\\\"time is\\\", timeout=1)\\n\",\n    \"\\n\",\n    \"with pytest.raises(RuntimeError):\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        \\\"python3\\\", \\\"-c\\\", \\\"should break on this\\\", pattern=\\\"time is\\\", timeout=5\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"proc = await run_and_match(\\\"python3\\\", \\\"-c\\\", cmd, pattern=\\\"time is\\\", timeout=10)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d27a4fd1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"cmd = \\\"import datetime; from time import sleep; sleep(3); print('time is:' + str(datetime.datetime.now())); print('time is:' + str(datetime.datetime.now()))\\\"\\n\",\n    \"if platform.system() == \\\"Windows\\\":\\n\",\n    \"    cmd = \\\"\\\\\\\"import datetime; from time import sleep; sleep(3); print('time is:' + str(datetime.datetime.now())); print('time is:' + str(datetime.datetime.now()))\\\\\\\"\\\"\\n\",\n    \"\\n\",\n    \"with pytest.raises(TimeoutError):\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        \\\"python3\\\", \\\"-c\\\", cmd, pattern=\\\"time is\\\", timeout=5, num_to_match=3\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"proc = await run_and_match(\\n\",\n    \"    \\\"python3\\\", \\\"-c\\\", cmd, pattern=\\\"time is\\\", 
timeout=10, num_to_match=2\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"19ef0a81\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def is_port_in_use(port: Union[int, str]) -> bool:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Checks if a port is already in use.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        port (Union[int, str]): The port number to check. It can be provided as an integer or a string.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        bool: True if the port is in use, False otherwise.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\\n\",\n    \"        return s.connect_ex((\\\"localhost\\\", int(port))) == 0\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e84c49c7\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Socket is listening on port 9969\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def bind_to_port(port):\\n\",\n    \"    try:\\n\",\n    \"        # Create a socket object\\n\",\n    \"        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\\n\",\n    \"        # Set the socket option to reuse the address\\n\",\n    \"        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\\n\",\n    \"        # Bind the socket to the specified port\\n\",\n    \"        sock.bind((\\\"localhost\\\", port))\\n\",\n    \"        # Listen for incoming connections\\n\",\n    \"        sock.listen(1)\\n\",\n    \"        print(f\\\"Socket is listening on port {port}\\\")\\n\",\n    \"        return sock\\n\",\n    \"    except Exception as e:\\n\",\n    \"        print(f\\\"Failed to bind to port {port}: {e}\\\")\\n\",\n    \"        return None\\n\",\n    \"\\n\",\n    \"\\n\",\n    
\"test_port = 9969\\n\",\n    \"assert not is_port_in_use(port=test_port)\\n\",\n    \"\\n\",\n    \"s = bind_to_port(test_port)\\n\",\n    \"assert is_port_in_use(port=test_port)\\n\",\n    \"if s:\\n\",\n    \"    s.close()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c7a076ff\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"def get_free_port() -> str:\\n\",\n    \"    \\\"\\\"\\\"Gets a port number which is available and free in the system.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The free port number as a string.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    s = socket.socket()\\n\",\n    \"    s.bind((\\\"127.0.0.1\\\", 0))\\n\",\n    \"    port = str(s.getsockname()[1])\\n\",\n    \"    s.close()\\n\",\n    \"    return port\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def write_config_and_run(\\n\",\n    \"    config: str, config_path: Union[str, Path], run_cmd: str\\n\",\n    \") -> asyncio.subprocess.Process:\\n\",\n    \"    \\\"\\\"\\\"Writes the configuration to a file, and runs a command using the configuration.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        config: The configuration string.\\n\",\n    \"        config_path: Path to the configuration file.\\n\",\n    \"        run_cmd: The command to run.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The subprocess process object.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    with open(config_path, \\\"w\\\") as f:\\n\",\n    \"        f.write(config)\\n\",\n    \"\\n\",\n    \"    return await asyncio.create_subprocess_exec(\\n\",\n    \"        run_cmd,\\n\",\n    \"        config_path,\\n\",\n    \"        stdout=asyncio.subprocess.PIPE,\\n\",\n    \"        stdin=asyncio.subprocess.PIPE,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def get_service_config_string(\\n\",\n    \"    self: ApacheKafkaBroker, service: str, *, 
data_dir: Path\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Gets the configuration string for a service.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        service: Name of the service (\\\"kafka\\\" or \\\"zookeeper\\\").\\n\",\n    \"        data_dir: Path to the directory where the service will save data.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The service configuration string.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    service_kwargs = getattr(self, f\\\"{service}_kwargs\\\")\\n\",\n    \"    if service == \\\"kafka\\\":\\n\",\n    \"        return get_kafka_config_string(data_dir=data_dir, **service_kwargs)\\n\",\n    \"    else:\\n\",\n    \"        return get_zookeeper_config_string(data_dir=data_dir, **service_kwargs)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _start_service(self: ApacheKafkaBroker, service: str = \\\"kafka\\\") -> None:\\n\",\n    \"    \\\"\\\"\\\"Starts a service (kafka or zookeeper) asynchronously.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        service: Name of the service (\\\"kafka\\\" or \\\"zookeeper\\\").\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(f\\\"Starting {service}...\\\")\\n\",\n    \"\\n\",\n    \"    if self.temporary_directory_path is None:\\n\",\n    \"        raise ValueError(\\n\",\n    \"            \\\"ApacheKafkaBroker._start_service(): self.temporary_directory_path is None, did you initialise it?\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    configs_tried: List[Dict[str, Any]] = []\\n\",\n    \"\\n\",\n    \"    for i in range(self.retries + 1):\\n\",\n    \"        configs_tried = configs_tried + [getattr(self, f\\\"{service}_kwargs\\\").copy()]\\n\",\n    \"\\n\",\n    \"        service_config_path = self.temporary_directory_path / f\\\"{service}.properties\\\"\\n\",\n    \"\\n\",\n    \"        with open(service_config_path, \\\"w\\\") as f:\\n\",\n    \"            f.write(\\n\",\n    \"                
self.get_service_config_string(\\n\",\n    \"                    service, data_dir=self.temporary_directory_path\\n\",\n    \"                )\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            port = (\\n\",\n    \"                self.zookeeper_kwargs[\\\"zookeeper_port\\\"]\\n\",\n    \"                if service == \\\"zookeeper\\\"\\n\",\n    \"                else self.kafka_kwargs[\\\"listener_port\\\"]\\n\",\n    \"            )\\n\",\n    \"            if is_port_in_use(port):\\n\",\n    \"                raise ValueError(f\\\"Port {port} is already in use\\\")\\n\",\n    \"\\n\",\n    \"            script_extension = \\\"bat\\\" if platform.system() == \\\"Windows\\\" else \\\"sh\\\"\\n\",\n    \"            service_start_script = f\\\"{service}-server-start.{script_extension}\\\"\\n\",\n    \"            service_task = await run_and_match(\\n\",\n    \"                service_start_script,\\n\",\n    \"                str(service_config_path),\\n\",\n    \"                pattern=\\\"Recorded new controller, from now on will use node\\\"\\n\",\n    \"                if service == \\\"kafka\\\"\\n\",\n    \"                else \\\"INFO Snapshot taken\\\",\\n\",\n    \"                timeout=30,\\n\",\n    \"            )\\n\",\n    \"        except Exception as e:\\n\",\n    \"            print(e)\\n\",\n    \"            logger.info(\\n\",\n    \"                f\\\"{service} startup failed, generating a new port and retrying...\\\"\\n\",\n    \"            )\\n\",\n    \"            port = get_free_port()\\n\",\n    \"            if service == \\\"zookeeper\\\":\\n\",\n    \"                self.zookeeper_kwargs[\\\"zookeeper_port\\\"] = port\\n\",\n    \"                self.kafka_kwargs[\\\"zookeeper_port\\\"] = port\\n\",\n    \"            else:\\n\",\n    \"                self.kafka_kwargs[\\\"listener_port\\\"] = port\\n\",\n    \"\\n\",\n    \"            logger.info(f\\\"{service} new 
port={port}\\\")\\n\",\n    \"        else:\\n\",\n    \"            setattr(self, f\\\"{service}_task\\\", service_task)\\n\",\n    \"            return\\n\",\n    \"\\n\",\n    \"    raise ValueError(f\\\"Could not start {service} with params: {configs_tried}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _start_kafka(self: ApacheKafkaBroker) -> None:\\n\",\n    \"    \\\"\\\"\\\"Starts a local Kafka broker asynchronously.\\\"\\\"\\\"\\n\",\n    \"    return await self._start_service(\\\"kafka\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _start_zookeeper(self: ApacheKafkaBroker) -> None:\\n\",\n    \"    \\\"\\\"\\\"Starts a local ZooKeeper instance asynchronously.\\\"\\\"\\\"\\n\",\n    \"    return await self._start_service(\\\"zookeeper\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _create_topics(self: ApacheKafkaBroker) -> None:\\n\",\n    \"    \\\"\\\"\\\"Creates missing topics in a local Kafka broker asynchronously.\\\"\\\"\\\"\\n\",\n    \"    listener_port = self.kafka_kwargs.get(\\\"listener_port\\\", 9092)\\n\",\n    \"    bootstrap_server = f\\\"127.0.0.1:{listener_port}\\\"\\n\",\n    \"\\n\",\n    \"    script_extension = \\\"bat\\\" if platform.system() == \\\"Windows\\\" else \\\"sh\\\"\\n\",\n    \"    topics_script = f\\\"kafka-topics.{script_extension}\\\"\\n\",\n    \"    async with asyncer.create_task_group() as tg:\\n\",\n    \"        processes = [\\n\",\n    \"            tg.soonify(asyncio.create_subprocess_exec)(\\n\",\n    \"                topics_script,\\n\",\n    \"                \\\"--create\\\",\\n\",\n    \"                f\\\"--topic={topic}\\\",\\n\",\n    \"                f\\\"--bootstrap-server={bootstrap_server}\\\",\\n\",\n    \"                stdout=asyncio.subprocess.PIPE,\\n\",\n    \"                stdin=asyncio.subprocess.PIPE,\\n\",\n    \"            )\\n\",\n    \"            for topic in self.topics\\n\",\n    \"        
]\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"        return_values = [\\n\",\n    \"            await asyncio.wait_for(process.value.wait(), 30) for process in processes\\n\",\n    \"        ]\\n\",\n    \"        if any(return_value != 0 for return_value in return_values):\\n\",\n    \"            raise ValueError(\\\"Could not create missing topics!\\\")\\n\",\n    \"    except asyncio.TimeoutError as _:\\n\",\n    \"        raise ValueError(\\\"Timed out while creating missing topics!\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _start(self: ApacheKafkaBroker) -> str:\\n\",\n    \"    \\\"\\\"\\\"Starts a local Kafka broker and ZooKeeper instance asynchronously.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The Kafka broker bootstrap server address in string format: host:port.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    self._check_deps()\\n\",\n    \"\\n\",\n    \"    self.temporary_directory = TemporaryDirectory()\\n\",\n    \"    self.temporary_directory_path = Path(self.temporary_directory.__enter__())\\n\",\n    \"\\n\",\n    \"    await self._start_zookeeper()\\n\",\n    \"    await self._start_kafka()\\n\",\n    \"\\n\",\n    \"    listener_port = self.kafka_kwargs.get(\\\"listener_port\\\", 9092)\\n\",\n    \"    bootstrap_server = f\\\"127.0.0.1:{listener_port}\\\"\\n\",\n    \"    logger.info(f\\\"Local Kafka broker up and running on {bootstrap_server}\\\")\\n\",\n    \"\\n\",\n    \"    await self._create_topics()\\n\",\n    \"\\n\",\n    \"    self._is_started = True\\n\",\n    \"\\n\",\n    \"    return bootstrap_server\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _stop(self: ApacheKafkaBroker) -> None:\\n\",\n    \"    \\\"\\\"\\\"Stops a local Kafka broker and ZooKeeper instance asynchronously.\\\"\\\"\\\"\\n\",\n    \"    await terminate_asyncio_process(self.kafka_task)  # type: ignore\\n\",\n    \"    await terminate_asyncio_process(self.zookeeper_task)  # type: 
ignore\\n\",\n    \"    self.temporary_directory.__exit__(None, None, None)  # type: ignore\\n\",\n    \"    self._is_started = False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"febe12c9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 47147...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 47147 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 46766...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 46766 terminated.\\n\",\n      \"**************************************************ZOOKEEPER LOGS++++++++++++++++++++++++++++++++++++++++++++++++++\\n\",\n      \"[2023-06-23 12:28:08,810] INFO PrepRequestProcessor (sid:0) started, reconfigEnabled=false (org.apache.zookeeper.server.PrepRequestProcessor)\\n\",\n      \"[2023-06-23 12:28:08,810] INFO zookeeper.request_throttler.shutdownTimeout = 10000 (org.apache.zookeeper.server.RequestThrottler)\\n\",\n      \"[2023-06-23 12:28:08,821] INFO Using checkIntervalMs=60000 maxPerMinute=10000 maxNeverUsedIntervalMs=0 (org.apache.zookeeper.server.ContainerManager)\\n\",\n      \"[2023-06-23 12:28:08,822] INFO ZooKeeper audit is disabled. 
(org.apache.zookeeper.audit.ZKAuditProvider)\\n\",\n      \"[2023-06-23 12:28:09,626] INFO Creating new log file: log.1 (org.apache.zookeeper.server.persistence.FileTxnLog)\\n\",\n      \"\\n\",\n      \"**************************************************KAFKA LOGS++++++++++++++++++++++++++++++++++++++++++++++++++\\n\",\n      \"[2023-06-23 12:28:10,617] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Recorded new controller, from now on will use node kumaran-fastkafka-devel:29092 (id: 0 rack: null) (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:11,583] INFO Terminating process due to signal SIGTERM (org.apache.kafka.common.utils.LoggingSignalHandler)\\n\",\n      \"[2023-06-23 12:28:11,584] INFO [KafkaServer id=0] shutting down (kafka.server.KafkaServer)\\n\",\n      \"[2023-06-23 12:28:11,585] INFO [KafkaServer id=0] Starting controlled shutdown (kafka.server.KafkaServer)\\n\",\n      \"[2023-06-23 12:28:11,596] INFO [KafkaServer id=0] Controlled shutdown request returned successfully after 7ms (kafka.server.KafkaServer)\\n\",\n      \"[2023-06-23 12:28:11,598] INFO [/config/changes-event-process-thread]: Shutting down (kafka.common.ZkNodeChangeNotificationListener$ChangeEventProcessThread)\\n\",\n      \"[2023-06-23 12:28:11,599] INFO [/config/changes-event-process-thread]: Stopped (kafka.common.ZkNodeChangeNotificationListener$ChangeEventProcessThread)\\n\",\n      \"[2023-06-23 12:28:11,599] INFO [/config/changes-event-process-thread]: Shutdown completed (kafka.common.ZkNodeChangeNotificationListener$ChangeEventProcessThread)\\n\",\n      \"[2023-06-23 12:28:11,599] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Stopping socket server request processors (kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:11,603] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Stopped socket server request processors (kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:11,604] INFO [data-plane Kafka 
Request Handler on Broker 0], shutting down (kafka.server.KafkaRequestHandlerPool)\\n\",\n      \"[2023-06-23 12:28:11,605] INFO [data-plane Kafka Request Handler on Broker 0], shut down completely (kafka.server.KafkaRequestHandlerPool)\\n\",\n      \"[2023-06-23 12:28:11,607] INFO [ExpirationReaper-0-AlterAcls]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,608] INFO [ExpirationReaper-0-AlterAcls]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,608] INFO [ExpirationReaper-0-AlterAcls]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,609] INFO [KafkaApi-0] Shutdown complete. (kafka.server.KafkaApis)\\n\",\n      \"[2023-06-23 12:28:11,609] INFO [ExpirationReaper-0-topic]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,610] INFO [ExpirationReaper-0-topic]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,610] INFO [ExpirationReaper-0-topic]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,611] INFO [TransactionCoordinator id=0] Shutting down. 
(kafka.coordinator.transaction.TransactionCoordinator)\\n\",\n      \"[2023-06-23 12:28:11,612] INFO [Transaction State Manager 0]: Shutdown complete (kafka.coordinator.transaction.TransactionStateManager)\\n\",\n      \"[2023-06-23 12:28:11,612] INFO [TxnMarkerSenderThread-0]: Shutting down (kafka.coordinator.transaction.TransactionMarkerChannelManager)\\n\",\n      \"[2023-06-23 12:28:11,612] INFO [TxnMarkerSenderThread-0]: Stopped (kafka.coordinator.transaction.TransactionMarkerChannelManager)\\n\",\n      \"[2023-06-23 12:28:11,612] INFO [TxnMarkerSenderThread-0]: Shutdown completed (kafka.coordinator.transaction.TransactionMarkerChannelManager)\\n\",\n      \"[2023-06-23 12:28:11,613] INFO [TransactionCoordinator id=0] Shutdown complete. (kafka.coordinator.transaction.TransactionCoordinator)\\n\",\n      \"[2023-06-23 12:28:11,613] INFO [GroupCoordinator 0]: Shutting down. (kafka.coordinator.group.GroupCoordinator)\\n\",\n      \"[2023-06-23 12:28:11,614] INFO [ExpirationReaper-0-Heartbeat]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,614] INFO [ExpirationReaper-0-Heartbeat]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,615] INFO [ExpirationReaper-0-Heartbeat]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,615] INFO [ExpirationReaper-0-Rebalance]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,615] INFO [ExpirationReaper-0-Rebalance]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,615] INFO [ExpirationReaper-0-Rebalance]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,615] INFO [GroupCoordinator 0]: Shutdown complete. 
(kafka.coordinator.group.GroupCoordinator)\\n\",\n      \"[2023-06-23 12:28:11,616] INFO [ReplicaManager broker=0] Shutting down (kafka.server.ReplicaManager)\\n\",\n      \"[2023-06-23 12:28:11,616] INFO [LogDirFailureHandler]: Shutting down (kafka.server.ReplicaManager$LogDirFailureHandler)\\n\",\n      \"[2023-06-23 12:28:11,616] INFO [LogDirFailureHandler]: Stopped (kafka.server.ReplicaManager$LogDirFailureHandler)\\n\",\n      \"[2023-06-23 12:28:11,616] INFO [LogDirFailureHandler]: Shutdown completed (kafka.server.ReplicaManager$LogDirFailureHandler)\\n\",\n      \"[2023-06-23 12:28:11,616] INFO [ReplicaFetcherManager on broker 0] shutting down (kafka.server.ReplicaFetcherManager)\\n\",\n      \"[2023-06-23 12:28:11,617] INFO [ReplicaFetcherManager on broker 0] shutdown completed (kafka.server.ReplicaFetcherManager)\\n\",\n      \"[2023-06-23 12:28:11,617] INFO [ReplicaAlterLogDirsManager on broker 0] shutting down (kafka.server.ReplicaAlterLogDirsManager)\\n\",\n      \"[2023-06-23 12:28:11,618] INFO [ReplicaAlterLogDirsManager on broker 0] shutdown completed (kafka.server.ReplicaAlterLogDirsManager)\\n\",\n      \"[2023-06-23 12:28:11,618] INFO [ExpirationReaper-0-Fetch]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,618] INFO [ExpirationReaper-0-Fetch]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,618] INFO [ExpirationReaper-0-Fetch]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,618] INFO [ExpirationReaper-0-Produce]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,619] INFO [ExpirationReaper-0-Produce]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,619] INFO [ExpirationReaper-0-Produce]: Shutdown completed 
(kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,619] INFO [ExpirationReaper-0-DeleteRecords]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,619] INFO [ExpirationReaper-0-DeleteRecords]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,619] INFO [ExpirationReaper-0-DeleteRecords]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,619] INFO [ExpirationReaper-0-ElectLeader]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,620] INFO [ExpirationReaper-0-ElectLeader]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,620] INFO [ExpirationReaper-0-ElectLeader]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:11,624] INFO [ReplicaManager broker=0] Shut down completely (kafka.server.ReplicaManager)\\n\",\n      \"[2023-06-23 12:28:11,625] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Shutting down (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:11,625] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Stopped (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:11,625] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Shutdown completed (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:11,627] INFO Broker to controller channel manager for alter-partition shutdown (kafka.server.BrokerToControllerChannelManagerImpl)\\n\",\n      \"[2023-06-23 12:28:11,627] INFO [zk-broker-0-to-controller-forwarding-channel-manager]: Shutting down (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 
12:28:11,627] INFO [zk-broker-0-to-controller-forwarding-channel-manager]: Stopped (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:11,627] INFO [zk-broker-0-to-controller-forwarding-channel-manager]: Shutdown completed (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:11,627] INFO Broker to controller channel manager for forwarding shutdown (kafka.server.BrokerToControllerChannelManagerImpl)\\n\",\n      \"[2023-06-23 12:28:11,628] INFO Shutting down. (kafka.log.LogManager)\\n\",\n      \"[2023-06-23 12:28:11,629] INFO [kafka-log-cleaner-thread-0]: Shutting down (kafka.log.LogCleaner$CleanerThread)\\n\",\n      \"[2023-06-23 12:28:11,630] INFO [kafka-log-cleaner-thread-0]: Stopped (kafka.log.LogCleaner$CleanerThread)\\n\",\n      \"[2023-06-23 12:28:11,630] INFO [kafka-log-cleaner-thread-0]: Shutdown completed (kafka.log.LogCleaner$CleanerThread)\\n\",\n      \"[2023-06-23 12:28:11,641] INFO Shutdown complete. (kafka.log.LogManager)\\n\",\n      \"[2023-06-23 12:28:11,646] INFO [feature-zk-node-event-process-thread]: Shutting down (kafka.server.FinalizedFeatureChangeListener$ChangeNotificationProcessorThread)\\n\",\n      \"[2023-06-23 12:28:11,646] INFO [feature-zk-node-event-process-thread]: Shutdown completed (kafka.server.FinalizedFeatureChangeListener$ChangeNotificationProcessorThread)\\n\",\n      \"[2023-06-23 12:28:11,646] INFO [feature-zk-node-event-process-thread]: Stopped (kafka.server.FinalizedFeatureChangeListener$ChangeNotificationProcessorThread)\\n\",\n      \"[2023-06-23 12:28:11,646] INFO [ZooKeeperClient Kafka server] Closing. (kafka.zookeeper.ZooKeeperClient)\\n\",\n      \"[2023-06-23 12:28:11,750] INFO Session: 0x10001a4af720000 closed (org.apache.zookeeper.ZooKeeper)\\n\",\n      \"[2023-06-23 12:28:11,750] INFO EventThread shut down for session: 0x10001a4af720000 (org.apache.zookeeper.ClientCnxn)\\n\",\n      \"[2023-06-23 12:28:11,751] INFO [ZooKeeperClient Kafka server] Closed. 
(kafka.zookeeper.ZooKeeperClient)\\n\",\n      \"[2023-06-23 12:28:11,751] INFO [ThrottledChannelReaper-Fetch]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,752] INFO [ThrottledChannelReaper-Fetch]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,752] INFO [ThrottledChannelReaper-Fetch]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,752] INFO [ThrottledChannelReaper-Produce]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,752] INFO [ThrottledChannelReaper-Produce]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,752] INFO [ThrottledChannelReaper-Produce]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,753] INFO [ThrottledChannelReaper-Request]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,753] INFO [ThrottledChannelReaper-Request]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,753] INFO [ThrottledChannelReaper-Request]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,753] INFO [ThrottledChannelReaper-ControllerMutation]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,753] INFO [ThrottledChannelReaper-ControllerMutation]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,753] INFO [ThrottledChannelReaper-ControllerMutation]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:11,754] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Shutting down socket server 
(kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:11,763] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Shutdown completed (kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:11,763] INFO Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics)\\n\",\n      \"[2023-06-23 12:28:11,763] INFO Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics)\\n\",\n      \"[2023-06-23 12:28:11,763] INFO Metrics reporters closed (org.apache.kafka.common.metrics.Metrics)\\n\",\n      \"[2023-06-23 12:28:11,764] INFO Broker and topic stats closed (kafka.server.BrokerTopicStats)\\n\",\n      \"[2023-06-23 12:28:11,764] INFO App info kafka.server for 0 unregistered (org.apache.kafka.common.utils.AppInfoParser)\\n\",\n      \"[2023-06-23 12:28:11,765] INFO [KafkaServer id=0] shut down completed (kafka.server.KafkaServer)\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = ApacheKafkaBroker(listener_port=29092)\\n\",\n    \"async with broker:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"print(\\\"*\\\" * 50 + \\\"ZOOKEEPER LOGS\\\" + \\\"+\\\" * 50)\\n\",\n    \"zookeeper_output, _ = await broker.zookeeper_task.communicate()\\n\",\n    \"print(zookeeper_output.decode(\\\"UTF-8\\\"))\\n\",\n    \"\\n\",\n    \"print(\\\"*\\\" * 50 + \\\"KAFKA LOGS\\\" + \\\"+\\\" * 50)\\n\",\n    \"kafka_output, _ = await broker.kafka_task.communicate()\\n\",\n    \"print(kafka_output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"10338a30\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: 
Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"Port 2181 is already in use\\n\",\n      \"[INFO] __main__: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"[INFO] __main__: zookeeper new port=34331\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 48014...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 48014 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 47632...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 47632 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker_1 = ApacheKafkaBroker(listener_port=29092)\\n\",\n    \"async with broker_1:\\n\",\n    \"    port = broker_1.zookeeper_kwargs[\\\"zookeeper_port\\\"]\\n\",\n    \"    broker_2 = ApacheKafkaBroker(zookeeper_port=port, retries=0)\\n\",\n    \"    with pytest.raises(ValueError) as e:\\n\",\n    \"        async with broker_2:\\n\",\n    \"            pass\\n\",\n    \"\\n\",\n    \"assert e.value.args[0].startswith(\\\"Could not start zookeeper with params:\\\")\\n\",\n    \"\\n\",\n    \"for broker in [broker_2]:\\n\",\n    \"    assert broker.zookeeper_task == None\\n\",\n    \"#     print(\\\"*\\\" * 50 + \\\"ZOOKEEPER LOGS\\\" + \\\"+\\\" * 50)\\n\",\n    \"#     zookeeper_output, _ = await broker.zookeeper_task.communicate()\\n\",\n    \"#     print(zookeeper_output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c0aff342\",\n   \"metadata\": {},\n   \"outputs\": [],\n  
 \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def start(self: ApacheKafkaBroker) -> str:\\n\",\n    \"    \\\"\\\"\\\"Starts a local Kafka broker and ZooKeeper instance synchronously.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The Kafka broker bootstrap server address in string format: host:port.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(f\\\"{self.__class__.__name__}.start(): entering...\\\")\\n\",\n    \"    try:\\n\",\n    \"        # get or create loop\\n\",\n    \"        try:\\n\",\n    \"            loop = asyncio.get_event_loop()\\n\",\n    \"        except RuntimeError as e:\\n\",\n    \"            logger.warning(\\n\",\n    \"                f\\\"{self.__class__.__name__}.start(): RuntimeError raised when calling asyncio.get_event_loop(): {e}\\\"\\n\",\n    \"            )\\n\",\n    \"            logger.warning(\\n\",\n    \"                f\\\"{self.__class__.__name__}.start(): asyncio.new_event_loop()\\\"\\n\",\n    \"            )\\n\",\n    \"            loop = asyncio.new_event_loop()\\n\",\n    \"\\n\",\n    \"        # start zookeeper and kafka broker in the loop\\n\",\n    \"\\n\",\n    \"        if loop.is_running():\\n\",\n    \"            if self.apply_nest_asyncio:\\n\",\n    \"                logger.warning(\\n\",\n    \"                    f\\\"{self.__class__.__name__}.start(): ({loop}) is already running!\\\"\\n\",\n    \"                )\\n\",\n    \"                logger.warning(\\n\",\n    \"                    f\\\"{self.__class__.__name__}.start(): calling nest_asyncio.apply()\\\"\\n\",\n    \"                )\\n\",\n    \"                nest_asyncio.apply(loop)\\n\",\n    \"            else:\\n\",\n    \"                msg = f\\\"{self.__class__.__name__}.start(): ({loop}) is already running! 
Use 'apply_nest_asyncio=True' when creating 'ApacheKafkaBroker' to prevent this.\\\"\\n\",\n    \"                logger.error(msg)\\n\",\n    \"                raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"        retval = loop.run_until_complete(self._start())\\n\",\n    \"        logger.info(f\\\"{self.__class__}.start(): returning {retval}\\\")\\n\",\n    \"        return retval\\n\",\n    \"    finally:\\n\",\n    \"        logger.info(f\\\"{self.__class__.__name__}.start(): exited.\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def stop(self: ApacheKafkaBroker) -> None:\\n\",\n    \"    \\\"\\\"\\\"Stops a local kafka broker and zookeeper instance synchronously\\\"\\\"\\\"\\n\",\n    \"    logger.info(f\\\"{self.__class__.__name__}.stop(): entering...\\\")\\n\",\n    \"    try:\\n\",\n    \"        if not self._is_started:\\n\",\n    \"            raise RuntimeError(\\n\",\n    \"                \\\"ApacheKafkaBroker not started yet, please call ApacheKafkaBroker.start() before!\\\"\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"        loop = asyncio.get_event_loop()\\n\",\n    \"        loop.run_until_complete(self._stop())\\n\",\n    \"    finally:\\n\",\n    \"        logger.info(f\\\"{self.__class__.__name__}.stop(): exited.\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"23db243b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] __main__: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] __main__: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is 
installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"[INFO] __main__: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:29092\\n\",\n      \"[INFO] __main__: ApacheKafkaBroker.start(): exited.\\n\",\n      \"Hello world!\\n\",\n      \"[INFO] __main__: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 48880...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 48880 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 48500...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 48500 terminated.\\n\",\n      \"[INFO] __main__: ApacheKafkaBroker.stop(): exited.\\n\",\n      \"**************************************************ZOOKEEPER LOGS++++++++++++++++++++++++++++++++++++++++++++++++++\\n\",\n      \"[2023-06-23 12:28:19,686] INFO PrepRequestProcessor (sid:0) started, reconfigEnabled=false (org.apache.zookeeper.server.PrepRequestProcessor)\\n\",\n      \"[2023-06-23 12:28:19,687] INFO zookeeper.request_throttler.shutdownTimeout = 10000 (org.apache.zookeeper.server.RequestThrottler)\\n\",\n      \"[2023-06-23 12:28:19,699] INFO Using checkIntervalMs=60000 maxPerMinute=10000 maxNeverUsedIntervalMs=0 (org.apache.zookeeper.server.ContainerManager)\\n\",\n      \"[2023-06-23 12:28:19,700] INFO ZooKeeper audit is disabled. 
(org.apache.zookeeper.audit.ZKAuditProvider)\\n\",\n      \"[2023-06-23 12:28:20,632] INFO Creating new log file: log.1 (org.apache.zookeeper.server.persistence.FileTxnLog)\\n\",\n      \"\\n\",\n      \"**************************************************KAFKA LOGS++++++++++++++++++++++++++++++++++++++++++++++++++\\n\",\n      \"[2023-06-23 12:28:21,706] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Recorded new controller, from now on will use node kumaran-fastkafka-devel:29092 (id: 0 rack: null) (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:22,660] INFO Terminating process due to signal SIGTERM (org.apache.kafka.common.utils.LoggingSignalHandler)\\n\",\n      \"[2023-06-23 12:28:22,661] INFO [KafkaServer id=0] shutting down (kafka.server.KafkaServer)\\n\",\n      \"[2023-06-23 12:28:22,662] INFO [KafkaServer id=0] Starting controlled shutdown (kafka.server.KafkaServer)\\n\",\n      \"[2023-06-23 12:28:22,673] INFO [KafkaServer id=0] Controlled shutdown request returned successfully after 7ms (kafka.server.KafkaServer)\\n\",\n      \"[2023-06-23 12:28:22,676] INFO [/config/changes-event-process-thread]: Shutting down (kafka.common.ZkNodeChangeNotificationListener$ChangeEventProcessThread)\\n\",\n      \"[2023-06-23 12:28:22,676] INFO [/config/changes-event-process-thread]: Stopped (kafka.common.ZkNodeChangeNotificationListener$ChangeEventProcessThread)\\n\",\n      \"[2023-06-23 12:28:22,676] INFO [/config/changes-event-process-thread]: Shutdown completed (kafka.common.ZkNodeChangeNotificationListener$ChangeEventProcessThread)\\n\",\n      \"[2023-06-23 12:28:22,677] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Stopping socket server request processors (kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:22,682] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Stopped socket server request processors (kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:22,682] INFO [data-plane Kafka 
Request Handler on Broker 0], shutting down (kafka.server.KafkaRequestHandlerPool)\\n\",\n      \"[2023-06-23 12:28:22,683] INFO [data-plane Kafka Request Handler on Broker 0], shut down completely (kafka.server.KafkaRequestHandlerPool)\\n\",\n      \"[2023-06-23 12:28:22,685] INFO [ExpirationReaper-0-AlterAcls]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,686] INFO [ExpirationReaper-0-AlterAcls]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,686] INFO [ExpirationReaper-0-AlterAcls]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,687] INFO [KafkaApi-0] Shutdown complete. (kafka.server.KafkaApis)\\n\",\n      \"[2023-06-23 12:28:22,687] INFO [ExpirationReaper-0-topic]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,687] INFO [ExpirationReaper-0-topic]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,687] INFO [ExpirationReaper-0-topic]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,689] INFO [TransactionCoordinator id=0] Shutting down. 
(kafka.coordinator.transaction.TransactionCoordinator)\\n\",\n      \"[2023-06-23 12:28:22,689] INFO [Transaction State Manager 0]: Shutdown complete (kafka.coordinator.transaction.TransactionStateManager)\\n\",\n      \"[2023-06-23 12:28:22,689] INFO [TxnMarkerSenderThread-0]: Shutting down (kafka.coordinator.transaction.TransactionMarkerChannelManager)\\n\",\n      \"[2023-06-23 12:28:22,690] INFO [TxnMarkerSenderThread-0]: Stopped (kafka.coordinator.transaction.TransactionMarkerChannelManager)\\n\",\n      \"[2023-06-23 12:28:22,690] INFO [TxnMarkerSenderThread-0]: Shutdown completed (kafka.coordinator.transaction.TransactionMarkerChannelManager)\\n\",\n      \"[2023-06-23 12:28:22,690] INFO [TransactionCoordinator id=0] Shutdown complete. (kafka.coordinator.transaction.TransactionCoordinator)\\n\",\n      \"[2023-06-23 12:28:22,690] INFO [GroupCoordinator 0]: Shutting down. (kafka.coordinator.group.GroupCoordinator)\\n\",\n      \"[2023-06-23 12:28:22,691] INFO [ExpirationReaper-0-Heartbeat]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,691] INFO [ExpirationReaper-0-Heartbeat]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,691] INFO [ExpirationReaper-0-Heartbeat]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,692] INFO [ExpirationReaper-0-Rebalance]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,692] INFO [ExpirationReaper-0-Rebalance]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,692] INFO [ExpirationReaper-0-Rebalance]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,692] INFO [GroupCoordinator 0]: Shutdown complete. 
(kafka.coordinator.group.GroupCoordinator)\\n\",\n      \"[2023-06-23 12:28:22,693] INFO [ReplicaManager broker=0] Shutting down (kafka.server.ReplicaManager)\\n\",\n      \"[2023-06-23 12:28:22,693] INFO [LogDirFailureHandler]: Shutting down (kafka.server.ReplicaManager$LogDirFailureHandler)\\n\",\n      \"[2023-06-23 12:28:22,693] INFO [LogDirFailureHandler]: Stopped (kafka.server.ReplicaManager$LogDirFailureHandler)\\n\",\n      \"[2023-06-23 12:28:22,693] INFO [LogDirFailureHandler]: Shutdown completed (kafka.server.ReplicaManager$LogDirFailureHandler)\\n\",\n      \"[2023-06-23 12:28:22,694] INFO [ReplicaFetcherManager on broker 0] shutting down (kafka.server.ReplicaFetcherManager)\\n\",\n      \"[2023-06-23 12:28:22,694] INFO [ReplicaFetcherManager on broker 0] shutdown completed (kafka.server.ReplicaFetcherManager)\\n\",\n      \"[2023-06-23 12:28:22,695] INFO [ReplicaAlterLogDirsManager on broker 0] shutting down (kafka.server.ReplicaAlterLogDirsManager)\\n\",\n      \"[2023-06-23 12:28:22,695] INFO [ReplicaAlterLogDirsManager on broker 0] shutdown completed (kafka.server.ReplicaAlterLogDirsManager)\\n\",\n      \"[2023-06-23 12:28:22,695] INFO [ExpirationReaper-0-Fetch]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,695] INFO [ExpirationReaper-0-Fetch]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,695] INFO [ExpirationReaper-0-Fetch]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,696] INFO [ExpirationReaper-0-Produce]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,696] INFO [ExpirationReaper-0-Produce]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,696] INFO [ExpirationReaper-0-Produce]: Shutdown completed 
(kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,696] INFO [ExpirationReaper-0-DeleteRecords]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,696] INFO [ExpirationReaper-0-DeleteRecords]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,696] INFO [ExpirationReaper-0-DeleteRecords]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,697] INFO [ExpirationReaper-0-ElectLeader]: Shutting down (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,697] INFO [ExpirationReaper-0-ElectLeader]: Stopped (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,697] INFO [ExpirationReaper-0-ElectLeader]: Shutdown completed (kafka.server.DelayedOperationPurgatory$ExpiredOperationReaper)\\n\",\n      \"[2023-06-23 12:28:22,702] INFO [ReplicaManager broker=0] Shut down completely (kafka.server.ReplicaManager)\\n\",\n      \"[2023-06-23 12:28:22,702] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Shutting down (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:22,702] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Stopped (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:22,702] INFO [zk-broker-0-to-controller-alter-partition-channel-manager]: Shutdown completed (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:22,703] INFO Broker to controller channel manager for alter-partition shutdown (kafka.server.BrokerToControllerChannelManagerImpl)\\n\",\n      \"[2023-06-23 12:28:22,704] INFO [zk-broker-0-to-controller-forwarding-channel-manager]: Shutting down (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 
12:28:22,704] INFO [zk-broker-0-to-controller-forwarding-channel-manager]: Stopped (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:22,704] INFO [zk-broker-0-to-controller-forwarding-channel-manager]: Shutdown completed (kafka.server.BrokerToControllerRequestThread)\\n\",\n      \"[2023-06-23 12:28:22,704] INFO Broker to controller channel manager for forwarding shutdown (kafka.server.BrokerToControllerChannelManagerImpl)\\n\",\n      \"[2023-06-23 12:28:22,704] INFO Shutting down. (kafka.log.LogManager)\\n\",\n      \"[2023-06-23 12:28:22,705] INFO [kafka-log-cleaner-thread-0]: Shutting down (kafka.log.LogCleaner$CleanerThread)\\n\",\n      \"[2023-06-23 12:28:22,706] INFO [kafka-log-cleaner-thread-0]: Shutdown completed (kafka.log.LogCleaner$CleanerThread)\\n\",\n      \"[2023-06-23 12:28:22,706] INFO [kafka-log-cleaner-thread-0]: Stopped (kafka.log.LogCleaner$CleanerThread)\\n\",\n      \"[2023-06-23 12:28:22,724] INFO Shutdown complete. (kafka.log.LogManager)\\n\",\n      \"[2023-06-23 12:28:22,729] INFO [feature-zk-node-event-process-thread]: Shutting down (kafka.server.FinalizedFeatureChangeListener$ChangeNotificationProcessorThread)\\n\",\n      \"[2023-06-23 12:28:22,729] INFO [feature-zk-node-event-process-thread]: Shutdown completed (kafka.server.FinalizedFeatureChangeListener$ChangeNotificationProcessorThread)\\n\",\n      \"[2023-06-23 12:28:22,729] INFO [feature-zk-node-event-process-thread]: Stopped (kafka.server.FinalizedFeatureChangeListener$ChangeNotificationProcessorThread)\\n\",\n      \"[2023-06-23 12:28:22,729] INFO [ZooKeeperClient Kafka server] Closing. (kafka.zookeeper.ZooKeeperClient)\\n\",\n      \"[2023-06-23 12:28:22,833] INFO Session: 0x10001a4d9ec0000 closed (org.apache.zookeeper.ZooKeeper)\\n\",\n      \"[2023-06-23 12:28:22,833] INFO EventThread shut down for session: 0x10001a4d9ec0000 (org.apache.zookeeper.ClientCnxn)\\n\",\n      \"[2023-06-23 12:28:22,834] INFO [ZooKeeperClient Kafka server] Closed. 
(kafka.zookeeper.ZooKeeperClient)\\n\",\n      \"[2023-06-23 12:28:22,834] INFO [ThrottledChannelReaper-Fetch]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,835] INFO [ThrottledChannelReaper-Fetch]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,835] INFO [ThrottledChannelReaper-Fetch]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,835] INFO [ThrottledChannelReaper-Produce]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-Produce]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-Produce]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-Request]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-Request]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-Request]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-ControllerMutation]: Shutting down (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-ControllerMutation]: Stopped (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,836] INFO [ThrottledChannelReaper-ControllerMutation]: Shutdown completed (kafka.server.ClientQuotaManager$ThrottledChannelReaper)\\n\",\n      \"[2023-06-23 12:28:22,837] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Shutting down socket server 
(kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:22,846] INFO [SocketServer listenerType=ZK_BROKER, nodeId=0] Shutdown completed (kafka.network.SocketServer)\\n\",\n      \"[2023-06-23 12:28:22,846] INFO Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics)\\n\",\n      \"[2023-06-23 12:28:22,846] INFO Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics)\\n\",\n      \"[2023-06-23 12:28:22,846] INFO Metrics reporters closed (org.apache.kafka.common.metrics.Metrics)\\n\",\n      \"[2023-06-23 12:28:22,847] INFO Broker and topic stats closed (kafka.server.BrokerTopicStats)\\n\",\n      \"[2023-06-23 12:28:22,848] INFO App info kafka.server for 0 unregistered (org.apache.kafka.common.utils.AppInfoParser)\\n\",\n      \"[2023-06-23 12:28:22,848] INFO [KafkaServer id=0] shut down completed (kafka.server.KafkaServer)\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"broker = ApacheKafkaBroker(apply_nest_asyncio=True, listener_port=29092)\\n\",\n    \"with broker:\\n\",\n    \"    print(\\\"Hello world!\\\")\\n\",\n    \"\\n\",\n    \"print(\\\"*\\\" * 50 + \\\"ZOOKEEPER LOGS\\\" + \\\"+\\\" * 50)\\n\",\n    \"zookeeper_output, _ = await broker.zookeeper_task.communicate()\\n\",\n    \"print(zookeeper_output.decode(\\\"UTF-8\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"print(\\\"*\\\" * 50 + \\\"KAFKA LOGS\\\" + \\\"+\\\" * 50)\\n\",\n    \"kafka_output, _ = await broker.kafka_task.communicate()\\n\",\n    \"print(kafka_output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"17c5fa47\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: 
Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 49747...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 49747 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 49365...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 49365 terminated.\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topics = [\\\"topic_1\\\", \\\"topic_2\\\"]\\n\",\n    \"script_extension = \\\"bat\\\" if platform.system() == \\\"Windows\\\" else \\\"sh\\\"\\n\",\n    \"topics_script = f\\\"kafka-topics.{script_extension}\\\"\\n\",\n    \"\\n\",\n    \"async with ApacheKafkaBroker(topics=topics, listener_port=29092) as bootstrap_server:\\n\",\n    \"    task = await asyncio.create_subprocess_exec(\\n\",\n    \"        topics_script,\\n\",\n    \"        \\\"--list\\\",\\n\",\n    \"        f\\\"--bootstrap-server={bootstrap_server}\\\",\\n\",\n    \"        stdout=asyncio.subprocess.PIPE,\\n\",\n    \"        stdin=asyncio.subprocess.PIPE,\\n\",\n    \"    )\\n\",\n    \"    output, _ = await asyncio.wait_for(task.communicate(), 30)\\n\",\n    \"    listed_topics = output.decode(\\\"UTF-8\\\").split(\\\"\\\\n\\\")[:-1]\\n\",\n    \"    for i, topic in enumerate(listed_topics):\\n\",\n    \"        listed_topics[i] = topic.strip()\\n\",\n    \"    assert set(listed_topics) == set(topics), set(listed_topics)\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1cc7e1a9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: 
ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] __main__: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] __main__: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"[INFO] __main__: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:29092\\n\",\n      \"[INFO] __main__: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test-topic'})\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test-topic': 1}. 
\\n\",\n      \"[INFO] __main__: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 51689...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 51689 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 51307...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 51307 terminated.\\n\",\n      \"[INFO] __main__: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"test_topic = \\\"test-topic\\\"\\n\",\n    \"test_msg = b\\\"test-msg\\\"\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[test_topic], apply_nest_asyncio=True, listener_port=29092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    consumer = AIOKafkaConsumer(test_topic, bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"    producer = AIOKafkaProducer(bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"    await consumer.start()\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"        await producer.send_and_wait(test_topic, test_msg)\\n\",\n    \"        msg = await consumer.getone()\\n\",\n    \"        assert msg, value == test_msg\\n\",\n    \"    finally:\\n\",\n    \"        await consumer.stop()\\n\",\n    \"        await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"225d7ef3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting 
kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test-topic'})\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test-topic': 1}. \\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 52918...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 52918 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 52535...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 52535 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"test_topic = \\\"test-topic\\\"\\n\",\n    \"test_msg = b\\\"test-msg\\\"\\n\",\n    \"\\n\",\n    \"async with ApacheKafkaBroker(\\n\",\n    \"    topics=[test_topic], listener_port=29092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    consumer = AIOKafkaConsumer(test_topic, bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"    producer = AIOKafkaProducer(bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"    await consumer.start()\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"        await producer.send_and_wait(test_topic, test_msg)\\n\",\n    \"        msg = await consumer.getone()\\n\",\n    \"        assert msg, value == test_msg\\n\",\n    \"    finally:\\n\",\n    \"        await consumer.stop()\\n\",\n    \"        await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9b81e6bf\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def _start_broker(broker: Any) -> Union[Any, Exception]:\\n\",\n    \"    try:\\n\",\n    \"  
      await broker._start()\\n\",\n    \"        return broker\\n\",\n    \"    except Exception as e:\\n\",\n    \"        return e\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def _stop_broker(broker: Any) -> Union[Any, Exception]:\\n\",\n    \"    try:\\n\",\n    \"        await broker._stop()\\n\",\n    \"        return broker\\n\",\n    \"    except Exception as e:\\n\",\n    \"        return e\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def _get_unique_local_brokers_to_start(\\n\",\n    \"    kafka_brokers_name: str,\\n\",\n    \"    kafka_brokers: List[Dict[str, Dict[str, Any]]],\\n\",\n    \"    duplicate_ok: bool = False,\\n\",\n    \"    zookeeper_ports: List[int] = [2181],\\n\",\n    \"    ignore_nonlocal_brokers: bool = False,\\n\",\n    \") -> List[Tuple[str, int]]:\\n\",\n    \"    brokers_to_start = [\\n\",\n    \"        x[kafka_brokers_name] for x in kafka_brokers if kafka_brokers_name in x\\n\",\n    \"    ]\\n\",\n    \"    unique_brokers_to_start = set([(x[\\\"url\\\"], x[\\\"port\\\"]) for x in brokers_to_start])\\n\",\n    \"\\n\",\n    \"    if len(unique_brokers_to_start) < len(brokers_to_start) and not duplicate_ok:\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"Duplicate kafka_brokers are found - {brokers_to_start}. 
Please change values or use 'duplicate_ok=True'\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    unique_urls = set([x[0] for x in unique_brokers_to_start])\\n\",\n    \"    localhost_urls = set(\\n\",\n    \"        [\\n\",\n    \"            \\\"localhost\\\",\\n\",\n    \"            \\\"127.0.0.1\\\",\\n\",\n    \"            \\\"0.0.0.0\\\",  # nosec: B104 - Possible binding to all interfaces\\n\",\n    \"        ]\\n\",\n    \"    )\\n\",\n    \"    if not unique_urls.issubset(localhost_urls) and not ignore_nonlocal_brokers:\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"URL values other than {', '.join(sorted(localhost_urls))} are found - {unique_urls - localhost_urls}. Please change values or use 'ignore_nonlocal_brokers=True'\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    unique_local_brokers_to_start = [\\n\",\n    \"        x for x in unique_brokers_to_start if x[0] in localhost_urls\\n\",\n    \"    ]\\n\",\n    \"    return unique_local_brokers_to_start\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"037b1905\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"T = TypeVar(\\\"T\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def _start_and_stop_brokers(brokers: List[T]) -> AsyncIterator[None]:\\n\",\n    \"    try:\\n\",\n    \"        retvals = [await _start_broker(broker) for broker in brokers]\\n\",\n    \"        exceptions = [x for x in retvals if isinstance(x, Exception)]\\n\",\n    \"\\n\",\n    \"        if exceptions:\\n\",\n    \"            raise RuntimeError(exceptions)\\n\",\n    \"\\n\",\n    \"        yield\\n\",\n    \"    finally:\\n\",\n    \"        retvals = [\\n\",\n    \"            await _stop_broker(broker)\\n\",\n    \"            for broker in retvals\\n\",\n    \"            if not isinstance(broker, Exception)\\n\",\n    \"        ]\\n\",\n    \"  
      exceptions = [x for x in retvals if isinstance(x, Exception)]\\n\",\n    \"\\n\",\n    \"        if exceptions:\\n\",\n    \"            raise RuntimeError(exceptions)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def start_apache_kafka_brokers(\\n\",\n    \"    kafka_brokers_name: str,\\n\",\n    \"    kafka_brokers: List[Dict[str, Dict[str, Any]]],\\n\",\n    \"    duplicate_ok: bool = False,\\n\",\n    \"    zookeeper_ports: List[int] = [2181],\\n\",\n    \"    ignore_nonlocal_brokers: bool = False,\\n\",\n    \") -> AsyncIterator[None]:\\n\",\n    \"    unique_local_brokers_to_start = await _get_unique_local_brokers_to_start(\\n\",\n    \"        kafka_brokers_name=kafka_brokers_name,\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        duplicate_ok=duplicate_ok,\\n\",\n    \"        ignore_nonlocal_brokers=ignore_nonlocal_brokers,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    if len(zookeeper_ports) < len(unique_local_brokers_to_start):\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"Atleast {len(unique_local_brokers_to_start)} zookeeper ports are needed to start kafka. 
Current zookeeper_ports length is {len(zookeeper_ports)}\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    brokers = [\\n\",\n    \"        ApacheKafkaBroker(listener_port=broker[1], zookeeper_port=zookeeper_port)  # type: ignore\\n\",\n    \"        for broker, zookeeper_port in zip(\\n\",\n    \"            unique_local_brokers_to_start, zookeeper_ports\\n\",\n    \"        )\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    async with _start_and_stop_brokers(brokers=brokers):\\n\",\n    \"        yield\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"43b93840\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29093\\n\",\n      \"Succesfully started multiple brokers\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 54147...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 54147 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 53766...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 53766 terminated.\\n\",\n      
\"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 54998...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 54998 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 54616...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 54616 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_broker_1 = dict(\\n\",\n    \"    development=dict(url=\\\"localhost\\\", port=29092),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"kafka_broker_2 = dict(\\n\",\n    \"    development=dict(url=\\\"localhost\\\", port=29093),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"kafka_brokers = [kafka_broker_1, kafka_broker_2]\\n\",\n    \"\\n\",\n    \"async with start_apache_kafka_brokers(\\n\",\n    \"    kafka_brokers_name=\\\"development\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    zookeeper_ports=[22181, 22182],\\n\",\n    \"):\\n\",\n    \"    print(\\\"Succesfully started multiple brokers\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"955e9828\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"<ExceptionInfo ValueError(\\\"Duplicate kafka_brokers are found - [{'url': 'localhost', 'port': 29092}, {'url': 'localhost', 'port': 29092}]. 
Please change values or use 'duplicate_ok=True'\\\") tblen=4>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers = [\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"localhost\\\", port=29092),\\n\",\n    \"    ),\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"localhost\\\", port=29092),\\n\",\n    \"    ),\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    async with start_apache_kafka_brokers(\\n\",\n    \"        kafka_brokers_name=\\\"development\\\",\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        duplicate_ok=False,\\n\",\n    \"    ):\\n\",\n    \"        print(\\\"If this is printed then the test didn't pass\\\")\\n\",\n    \"print(e)\\n\",\n    \"assert \\\"Duplicate kafka_brokers are found\\\" in str(e)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6f6ac313\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"<ExceptionInfo ValueError(\\\"URL values other than 0.0.0.0, 127.0.0.1, localhost are found - {'some_other_host'}. 
Please change values or use 'ignore_nonlocal_brokers=True'\\\") tblen=4>\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] __main__: Starting zookeeper...\\n\",\n      \"[INFO] __main__: Starting kafka...\\n\",\n      \"[INFO] __main__: Local Kafka broker up and running on 127.0.0.1:29092\\n\",\n      \"Starting again with 'ignore_nonlocal_brokers=True'\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 55882...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 55882 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 55501...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 55501 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers = [\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"127.0.0.1\\\", port=29092),\\n\",\n    \"    ),\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"some_other_host\\\", port=29092),\\n\",\n    \"    ),\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    async with start_apache_kafka_brokers(\\n\",\n    \"        kafka_brokers_name=\\\"development\\\",\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        ignore_nonlocal_brokers=False,\\n\",\n    \"    ):\\n\",\n    \"        print(\\\"If this is printed then the test didn't pass\\\")\\n\",\n    \"print(e)\\n\",\n    \"assert \\\"URL values other than 0.0.0.0, 127.0.0.1, localhost are found\\\" in str(e)\\n\",\n    \"\\n\",\n    \"async with start_apache_kafka_brokers(\\n\",\n    \"    kafka_brokers_name=\\\"development\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    
ignore_nonlocal_brokers=True,\\n\",\n    \"):\\n\",\n    \"    print(\\\"Starting again with 'ignore_nonlocal_brokers=True'\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"24d6fbe7\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"<ExceptionInfo ValueError('Atleast 2 zookeeper ports are needed to start kafka. Current zookeeper_ports length is 1') tblen=3>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers = [\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"localhost\\\", port=29092),\\n\",\n    \"    ),\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"localhost\\\", port=29093),\\n\",\n    \"    ),\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    async with start_apache_kafka_brokers(\\n\",\n    \"        kafka_brokers_name=\\\"development\\\",\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        zookeeper_ports=[22181],\\n\",\n    \"    ):\\n\",\n    \"        print(\\\"If this is printed then the test didn't pass\\\")\\n\",\n    \"print(e)\\n\",\n    \"assert (\\n\",\n    \"    \\\"Atleast 2 zookeeper ports are needed to start kafka. Current zookeeper_ports length is 1\\\"\\n\",\n    \"    in str(e)\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f67fecd5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/003_LocalRedpandaBroker.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0f94c3f6\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _testing.local_redpanda_broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"df2fbc62\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"from pathlib import Path\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"import nest_asyncio\\n\",\n    \"\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._components.helpers import in_notebook\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import delegates, export, patch\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import get_free_port, run_and_match, _get_unique_local_brokers_to_start, _start_and_stop_brokers\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2de5d98a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import pytest\\n\",\n    \"\\n\",\n    \"from fastkafka._aiokafka_imports import AIOKafkaConsumer, AIOKafkaProducer\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a8cd1883\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"if in_notebook():\\n\",\n    \"    from tqdm.notebook import tqdm\\n\",\n    \"else:\\n\",\n    \"    from tqdm import tqdm\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f6d872ab\",\n   
\"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"36ec8c10\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0291208d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_redpanda_docker_cmd(\\n\",\n    \"    listener_port: int = 9092,\\n\",\n    \"    tag: str = \\\"v23.1.2\\\",\\n\",\n    \"    seastar_core: int = 1,\\n\",\n    \"    memory: str = \\\"1G\\\",\\n\",\n    \"    mode: str = \\\"dev-container\\\",\\n\",\n    \"    default_log_level: str = \\\"debug\\\",\\n\",\n    \") -> List[str]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Generates a Docker CLI command to start redpanda container\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        listener_port: Port on which the clients (producers and consumers) can connect\\n\",\n    \"        tag: Tag of Redpanda image to use to start container\\n\",\n    \"        seastar_core: Core(s) to use byt Seastar (the framework Redpanda uses under the hood)\\n\",\n    \"        memory: The amount of memory to make available to Redpanda\\n\",\n    \"        mode: Mode to use to load configuration properties in container\\n\",\n    \"        default_log_level: Log levels to use for Redpanda\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    redpanda_docker_cmd = [\\n\",\n    \"        \\\"docker\\\",\\n\",\n    \"        \\\"run\\\",\\n\",\n    \"        \\\"--rm\\\",\\n\",\n    \"        
\\\"--name\\\",\\n\",\n    \"        f\\\"redpanda_{listener_port}\\\",\\n\",\n    \"        \\\"-p\\\",\\n\",\n    \"        f\\\"{listener_port}:{listener_port}\\\",\\n\",\n    \"        f\\\"docker.redpanda.com/redpandadata/redpanda:{tag}\\\",\\n\",\n    \"        \\\"redpanda\\\",\\n\",\n    \"        \\\"start\\\",\\n\",\n    \"        \\\"--kafka-addr\\\",\\n\",\n    \"        f\\\"internal://0.0.0.0:9090,external://0.0.0.0:{listener_port}\\\",\\n\",\n    \"        \\\"--advertise-kafka-addr\\\",\\n\",\n    \"        f\\\"internal://localhost:9090,external://localhost:{listener_port}\\\",\\n\",\n    \"        \\\"--smp\\\",\\n\",\n    \"        str(seastar_core),\\n\",\n    \"        \\\"--memory\\\",\\n\",\n    \"        memory,\\n\",\n    \"        \\\"--mode\\\",\\n\",\n    \"        mode,\\n\",\n    \"        \\\"--default-log-level\\\",\\n\",\n    \"        default_log_level,\\n\",\n    \"    ]\\n\",\n    \"    return redpanda_docker_cmd\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bfddd7b1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"expected = \\\"docker run --rm --name redpanda_9093 -p 9093:9093 docker.redpanda.com/redpandadata/redpanda:latest redpanda start --kafka-addr internal://0.0.0.0:9090,external://0.0.0.0:9093 --advertise-kafka-addr internal://localhost:9090,external://localhost:9093 --smp 1 --memory 2G --mode dev-container --default-log-level debug\\\"\\n\",\n    \"actual = get_redpanda_docker_cmd(listener_port=9093, tag=\\\"latest\\\", memory=\\\"2G\\\")\\n\",\n    \"assert \\\" \\\".join(actual) == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9dc00a80\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.testing\\\")\\n\",\n    \"class LocalRedpandaBroker:\\n\",\n    \"    \\\"\\\"\\\"LocalRedpandaBroker class, 
used for running unique redpanda brokers in tests to prevent topic clashing.\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    @delegates(get_redpanda_docker_cmd, keep=True)\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        topics: Iterable[str] = [],\\n\",\n    \"        *,\\n\",\n    \"        retries: int = 3,\\n\",\n    \"        apply_nest_asyncio: bool = False,\\n\",\n    \"        **kwargs: Dict[str, Any],\\n\",\n    \"    ):\\n\",\n    \"        \\\"\\\"\\\"Initialises the LocalRedpandaBroker object\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            topics: List of topics to create after sucessfull redpanda broker startup\\n\",\n    \"            retries: Number of retries to create redpanda service\\n\",\n    \"            apply_nest_asyncio: set to True if running in notebook\\n\",\n    \"            listener_port: Port on which the clients (producers and consumers) can connect\\n\",\n    \"            tag: Tag of Redpanda image to use to start container\\n\",\n    \"            seastar_core: Core(s) to use byt Seastar (the framework Redpanda uses under the hood)\\n\",\n    \"            memory: The amount of memory to make available to Redpanda\\n\",\n    \"            mode: Mode to use to load configuration properties in container\\n\",\n    \"            default_log_level: Log levels to use for Redpanda\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.redpanda_kwargs = kwargs\\n\",\n    \"\\n\",\n    \"        if \\\"listener_port\\\" not in self.redpanda_kwargs:\\n\",\n    \"            self.redpanda_kwargs[\\\"listener_port\\\"] = 9092  # type: ignore\\n\",\n    \"\\n\",\n    \"        self.retries = retries\\n\",\n    \"        self.apply_nest_asyncio = apply_nest_asyncio\\n\",\n    \"        self.temporary_directory: Optional[TemporaryDirectory] = None\\n\",\n    \"        self.temporary_directory_path: Optional[Path] = None\\n\",\n    \"        self.redpanda_task: Optional[asyncio.subprocess.Process] 
= None\\n\",\n    \"        self._is_started = False\\n\",\n    \"        self.topics: Iterable[str] = topics\\n\",\n    \"\\n\",\n    \"    @property\\n\",\n    \"    def is_started(self) -> bool:\\n\",\n    \"        \\\"\\\"\\\"Property indicating whether the LocalRedpandaBroker object is started.\\n\",\n    \"\\n\",\n    \"        The is_started property indicates if the LocalRedpandaBroker object is currently \\n\",\n    \"        in a started state. This implies that Redpanda docker container has sucesfully \\n\",\n    \"        started and is ready for handling events.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            bool: True if the object is started, False otherwise.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return self._is_started\\n\",\n    \"\\n\",\n    \"    @classmethod\\n\",\n    \"    async def _check_deps(cls) -> None:\\n\",\n    \"        \\\"\\\"\\\"Prepares the environment for running redpanda brokers.\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _start(self) -> str:\\n\",\n    \"        \\\"\\\"\\\"Starts a local redpanda broker instance asynchronously\\n\",\n    \"        Returns:\\n\",\n    \"           Redpanda broker bootstrap server address in string format: add:port\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def start(self) -> str:\\n\",\n    \"        \\\"\\\"\\\"Starts a local redpanda broker instance synchronously\\n\",\n    \"        Returns:\\n\",\n    \"           Redpanda broker bootstrap server address in string format: add:port\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def stop(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Stops a local redpanda broker instance synchronously\\\"\\\"\\\"\\n\",\n    \"        raise 
NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _stop(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Stops a local redpanda broker instance synchronously\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def get_service_config_string(self, service: str, *, data_dir: Path) -> str:\\n\",\n    \"        \\\"\\\"\\\"Generates a configuration for a service\\n\",\n    \"        Args:\\n\",\n    \"            data_dir: Path to the directory where the zookeepeer instance will save data\\n\",\n    \"            service: \\\"redpanda\\\", defines which service to get config string for\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _start_redpanda(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Start a local redpanda broker\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _create_topics(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Create missing topics in local redpanda broker\\n\",\n    \"        Returns:\\n\",\n    \"           None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def __enter__(self) -> str:\\n\",\n    \"        return self.start()\\n\",\n    \"\\n\",\n    \"    def __exit__(self, *args: Any, **kwargs: Any) -> None:\\n\",\n    \"        self.stop()\\n\",\n    \"\\n\",\n    \"    async def __aenter__(self) -> str:\\n\",\n    \"        return await self._start()\\n\",\n    \"\\n\",\n    \"    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\\n\",\n    \"        await self._stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e04f2dc8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": 
{\n      \"text/plain\": [\n       \"<fastkafka.testing.LocalRedpandaBroker>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"broker = LocalRedpandaBroker()\\n\",\n    \"broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"94f530a3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def check_docker(tag: str = \\\"v23.1.2\\\") -> bool:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Checks if a Docker image with the specified tag is available.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        tag: The tag of the Docker image to check. Defaults to \\\"v23.1.2\\\".\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        bool: True if the Docker image is available; False otherwise.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        docker_task = await run_and_match(\\n\",\n    \"            \\\"docker\\\",\\n\",\n    \"            \\\"pull\\\",\\n\",\n    \"            f\\\"docker.redpanda.com/redpandadata/redpanda:{tag}\\\",\\n\",\n    \"            pattern=f\\\"docker.redpanda.com/redpandadata/redpanda:{tag}\\\",\\n\",\n    \"        )\\n\",\n    \"        return True\\n\",\n    \"    except Exception as e:\\n\",\n    \"        logger.debug(f\\\"Error in check_docker() : {e}\\\")\\n\",\n    \"        return False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ecc3b8df\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"False\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"await check_docker()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7f6673e2\",\n   
\"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch(cls_method=True)  # type: ignore\\n\",\n    \"async def _check_deps(cls: LocalRedpandaBroker) -> None:\\n\",\n    \"    if not await check_docker():\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            \\\"Docker installation not found! Please install docker manually and retry.\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6c20e8f7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"broker = LocalRedpandaBroker()\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await check_docker():\\n\",\n    \"    await broker._check_deps()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"80275185\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# docker run --rm --name redpanda_9092 -p 9092:9092 docker.redpanda.com/redpandadata/redpanda 'redpanda start --kafka-addr internal://0.0.0.0:9090,external://0.0.0.0:9092 --advertise-kafka-addr internal://localhost:9090,external://localhost:9092'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"aae79c68\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _start_redpanda(self: LocalRedpandaBroker, service: str = \\\"redpanda\\\") -> None:\\n\",\n    \"    logger.info(f\\\"Starting {service}...\\\")\\n\",\n    \"\\n\",\n    \"    if self.temporary_directory_path is None:\\n\",\n    \"        raise ValueError(\\n\",\n    \"            \\\"LocalRedpandaBroker._start_redpanda(): self.temporary_directory_path is None, did you initialise it?\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    configs_tried: List[Dict[str, Any]] = []\\n\",\n    \"\\n\",\n    \"    for i 
in range(self.retries + 1):\\n\",\n    \"        configs_tried = configs_tried + [getattr(self, f\\\"{service}_kwargs\\\").copy()]\\n\",\n    \"\\n\",\n    \"        redpanda_docker_cmd = get_redpanda_docker_cmd(**self.redpanda_kwargs)  # type: ignore\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            service_task = await run_and_match(\\n\",\n    \"                *redpanda_docker_cmd,\\n\",\n    \"                capture=\\\"stderr\\\",\\n\",\n    \"                pattern=\\\"Bootstrap complete\\\",\\n\",\n    \"                timeout=30,\\n\",\n    \"            )\\n\",\n    \"        except Exception as e:\\n\",\n    \"            logger.info(\\n\",\n    \"                f\\\"{service} startup failed, generating a new port and retrying...\\\"\\n\",\n    \"            )\\n\",\n    \"            port = get_free_port()\\n\",\n    \"            self.redpanda_kwargs[\\\"listener_port\\\"] = port  # type: ignore\\n\",\n    \"\\n\",\n    \"            logger.info(f\\\"port={port}\\\")\\n\",\n    \"        else:\\n\",\n    \"            setattr(self, f\\\"{service}_task\\\", service_task)\\n\",\n    \"            return\\n\",\n    \"\\n\",\n    \"    raise ValueError(f\\\"Could not start {service} with params: {configs_tried}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _create_topics(self: LocalRedpandaBroker) -> None:\\n\",\n    \"    listener_port = self.redpanda_kwargs.get(\\\"listener_port\\\", 9092)\\n\",\n    \"\\n\",\n    \"    async with asyncer.create_task_group() as tg:\\n\",\n    \"        processes = [\\n\",\n    \"            tg.soonify(run_and_match)(\\n\",\n    \"                \\\"docker\\\",\\n\",\n    \"                \\\"exec\\\",\\n\",\n    \"                f\\\"redpanda_{listener_port}\\\",\\n\",\n    \"                \\\"rpk\\\",\\n\",\n    \"                \\\"topic\\\",\\n\",\n    \"                \\\"create\\\",\\n\",\n    \"                topic,\\n\",\n    \"                
pattern=topic,\\n\",\n    \"                timeout=10,\\n\",\n    \"            )\\n\",\n    \"            for topic in self.topics\\n\",\n    \"        ]\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"        return_values = [\\n\",\n    \"            await asyncio.wait_for(process.value.wait(), 30) for process in processes\\n\",\n    \"        ]\\n\",\n    \"        if any(return_value != 0 for return_value in return_values):\\n\",\n    \"            raise ValueError(\\\"Could not create missing topics!\\\")\\n\",\n    \"    except asyncio.TimeoutError as _:\\n\",\n    \"        raise ValueError(\\\"Timed out while creating missing topics!\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _start(self: LocalRedpandaBroker) -> str:\\n\",\n    \"    await self._check_deps()\\n\",\n    \"\\n\",\n    \"    self.temporary_directory = TemporaryDirectory()\\n\",\n    \"    self.temporary_directory_path = Path(self.temporary_directory.__enter__())\\n\",\n    \"\\n\",\n    \"    await self._start_redpanda()\\n\",\n    \"    await asyncio.sleep(5)\\n\",\n    \"\\n\",\n    \"    listener_port = self.redpanda_kwargs.get(\\\"listener_port\\\", 9092)\\n\",\n    \"    bootstrap_server = f\\\"127.0.0.1:{listener_port}\\\"\\n\",\n    \"    logger.info(f\\\"Local Redpanda broker up and running on {bootstrap_server}\\\")\\n\",\n    \"\\n\",\n    \"    await self._create_topics()\\n\",\n    \"\\n\",\n    \"    self._is_started = True\\n\",\n    \"\\n\",\n    \"    return bootstrap_server\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _stop(self: LocalRedpandaBroker) -> None:\\n\",\n    \"    logger.info(f\\\"Stopping redpanda...\\\")\\n\",\n    \"    await terminate_asyncio_process(self.redpanda_task)  # type: ignore\\n\",\n    \"    logger.info(f\\\"Redpanda stopped.\\\")\\n\",\n    \"    self.temporary_directory.__exit__(None, None, None)  # type: ignore\\n\",\n    \"    self._is_started = False\"\n   ]\n  },\n  {\n   
\"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3f09cf87\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"broker = LocalRedpandaBroker()\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await check_docker():\\n\",\n    \"    async with broker:\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    print(\\\"*\\\" * 50 + \\\"REDPANDA LOGS\\\" + \\\"+\\\" * 50)\\n\",\n    \"    redpanda_output, _ = await broker.redpanda_task.communicate()\\n\",\n    \"    print(redpanda_output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8b98298a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def start(self: LocalRedpandaBroker) -> str:\\n\",\n    \"    \\\"\\\"\\\"Starts a local redpanda broker instance synchronously\\n\",\n    \"    Returns:\\n\",\n    \"       Redpanda broker bootstrap server address in string format: add:port\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(f\\\"{self.__class__.__name__}.start(): entering...\\\")\\n\",\n    \"    try:\\n\",\n    \"        # get or create loop\\n\",\n    \"        try:\\n\",\n    \"            loop = asyncio.get_event_loop()\\n\",\n    \"        except RuntimeError as e:\\n\",\n    \"            logger.warning(\\n\",\n    \"                f\\\"{self.__class__.__name__}.start(): RuntimeError raised when calling asyncio.get_event_loop(): {e}\\\"\\n\",\n    \"            )\\n\",\n    \"            logger.warning(\\n\",\n    \"                f\\\"{self.__class__.__name__}.start(): asyncio.new_event_loop()\\\"\\n\",\n    \"            )\\n\",\n    \"            loop = asyncio.new_event_loop()\\n\",\n    \"\\n\",\n    \"        # start redpanda broker in the loop\\n\",\n    \"\\n\",\n    \"        if loop.is_running():\\n\",\n    \"            if self.apply_nest_asyncio:\\n\",\n 
   \"                logger.warning(\\n\",\n    \"                    f\\\"{self.__class__.__name__}.start(): ({loop}) is already running!\\\"\\n\",\n    \"                )\\n\",\n    \"                logger.warning(\\n\",\n    \"                    f\\\"{self.__class__.__name__}.start(): calling nest_asyncio.apply()\\\"\\n\",\n    \"                )\\n\",\n    \"                nest_asyncio.apply(loop)\\n\",\n    \"            else:\\n\",\n    \"                msg = f\\\"{self.__class__.__name__}.start(): ({loop}) is already running! Use 'apply_nest_asyncio=True' when creating 'LocalRedpandaBroker' to prevent this.\\\"\\n\",\n    \"                logger.error(msg)\\n\",\n    \"                raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"        retval = loop.run_until_complete(self._start())\\n\",\n    \"        logger.info(f\\\"{self.__class__}.start(): returning {retval}\\\")\\n\",\n    \"        return retval\\n\",\n    \"    finally:\\n\",\n    \"        logger.info(f\\\"{self.__class__.__name__}.start(): exited.\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def stop(self: LocalRedpandaBroker) -> None:\\n\",\n    \"    \\\"\\\"\\\"Stops a local redpanda broker instance synchronously\\\"\\\"\\\"\\n\",\n    \"    logger.info(f\\\"{self.__class__.__name__}.stop(): entering...\\\")\\n\",\n    \"    try:\\n\",\n    \"        if not self._is_started:\\n\",\n    \"            raise RuntimeError(\\n\",\n    \"                \\\"LocalRedpandaBroker not started yet, please call LocalRedpandaBroker.start() before!\\\"\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"        loop = asyncio.get_event_loop()\\n\",\n    \"        loop.run_until_complete(self._stop())\\n\",\n    \"    finally:\\n\",\n    \"        logger.info(f\\\"{self.__class__.__name__}.stop(): exited.\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"54c92191\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": 
[\n    \"broker = LocalRedpandaBroker(apply_nest_asyncio=True)\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await check_docker():\\n\",\n    \"    with broker:\\n\",\n    \"        print(\\\"Hello world!\\\")\\n\",\n    \"\\n\",\n    \"    print(\\\"*\\\" * 50 + \\\"REDPANDA LOGS\\\" + \\\"+\\\" * 50)\\n\",\n    \"    redpanda_output, _ = await broker.redpanda_task.communicate()\\n\",\n    \"    print(redpanda_output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"94867ea9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"test_topic = \\\"test-topic\\\"\\n\",\n    \"test_msg = b\\\"test-msg\\\"\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await check_docker():\\n\",\n    \"    with LocalRedpandaBroker(\\n\",\n    \"        topics=[test_topic], apply_nest_asyncio=True\\n\",\n    \"    ) as bootstrap_server:\\n\",\n    \"        consumer = AIOKafkaConsumer(test_topic, bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"        producer = AIOKafkaProducer(bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"        await consumer.start()\\n\",\n    \"        await producer.start()\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            await producer.send_and_wait(test_topic, test_msg)\\n\",\n    \"            msg = await consumer.getone()\\n\",\n    \"            assert msg.value == test_msg\\n\",\n    \"        finally:\\n\",\n    \"            await consumer.stop()\\n\",\n    \"            await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f68f7183\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"test_topic = \\\"test-topic\\\"\\n\",\n    \"test_msg = b\\\"test-msg\\\"\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await 
check_docker():\\n\",\n    \"    async with LocalRedpandaBroker(topics=[test_topic]) as bootstrap_server:\\n\",\n    \"        consumer = AIOKafkaConsumer(test_topic, bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"        producer = AIOKafkaProducer(bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"        await consumer.start()\\n\",\n    \"        await producer.start()\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            await producer.send_and_wait(test_topic, test_msg)\\n\",\n    \"            msg = await consumer.getone()\\n\",\n    \"            assert msg.value == test_msg\\n\",\n    \"        finally:\\n\",\n    \"            await consumer.stop()\\n\",\n    \"            await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6335b063\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def start_redpanda_brokers(\\n\",\n    \"    kafka_brokers_name: str,\\n\",\n    \"    kafka_brokers: List[Dict[str, Dict[str, Any]]],\\n\",\n    \"    duplicate_ok: bool = False,\\n\",\n    \"    ignore_nonlocal_brokers: bool = False,\\n\",\n    \") -> AsyncIterator[None]:\\n\",\n    \"    unique_local_brokers_to_start = await _get_unique_local_brokers_to_start(\\n\",\n    \"        kafka_brokers_name=kafka_brokers_name,\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        duplicate_ok=duplicate_ok,\\n\",\n    \"        ignore_nonlocal_brokers=ignore_nonlocal_brokers,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    brokers = [\\n\",\n    \"        LocalRedpandaBroker(listener_port=broker[1])  # type: ignore\\n\",\n    \"        for broker in unique_local_brokers_to_start\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    async with _start_and_stop_brokers(brokers=brokers):\\n\",\n    \"        yield\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"code\",\n   \"execution_count\": null,\n   \"id\": \"858c8a09\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"kafka_broker_1 = dict(\\n\",\n    \"    development=dict(url=\\\"localhost\\\", port=39092),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"kafka_broker_2 = dict(\\n\",\n    \"    development=dict(url=\\\"localhost\\\", port=39093),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"kafka_brokers = [kafka_broker_1, kafka_broker_2]\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await check_docker():\\n\",\n    \"    async with start_redpanda_brokers(\\n\",\n    \"        kafka_brokers_name=\\\"development\\\",\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"    ):\\n\",\n    \"        print(\\\"Succesfully started multiple brokers\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8cc7ebbd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"kafka_brokers = [\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"localhost\\\", port=39092),\\n\",\n    \"    ),\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"localhost\\\", port=39092),\\n\",\n    \"    ),\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await check_docker():\\n\",\n    \"\\n\",\n    \"    with pytest.raises(ValueError) as e:\\n\",\n    \"        async with start_redpanda_brokers(\\n\",\n    \"            kafka_brokers_name=\\\"development\\\",\\n\",\n    \"            kafka_brokers=kafka_brokers,\\n\",\n    \"            duplicate_ok=False,\\n\",\n    \"        ):\\n\",\n    \"            print(\\\"If this is printed then the test didn't pass\\\")\\n\",\n    \"    print(e)\\n\",\n    \"    assert \\\"Duplicate kafka_brokers are 
found\\\" in str(e)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"979b4406\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"kafka_brokers = [\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"127.0.0.1\\\", port=39092),\\n\",\n    \"    ),\\n\",\n    \"    dict(\\n\",\n    \"        development=dict(url=\\\"some_other_host\\\", port=39092),\\n\",\n    \"    ),\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"# Run test case only if docker is installed\\n\",\n    \"if await check_docker():\\n\",\n    \"\\n\",\n    \"    with pytest.raises(ValueError) as e:\\n\",\n    \"        async with start_redpanda_brokers(\\n\",\n    \"            kafka_brokers_name=\\\"development\\\",\\n\",\n    \"            kafka_brokers=kafka_brokers,\\n\",\n    \"            ignore_nonlocal_brokers=False,\\n\",\n    \"        ):\\n\",\n    \"            print(\\\"If this is printed then the test didn't pass\\\")\\n\",\n    \"    print(e)\\n\",\n    \"    assert \\\"URL values other than 0.0.0.0, 127.0.0.1, localhost are found\\\" in str(e)\\n\",\n    \"\\n\",\n    \"    async with start_redpanda_brokers(\\n\",\n    \"        kafka_brokers_name=\\\"development\\\",\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        ignore_nonlocal_brokers=True,\\n\",\n    \"    ):\\n\",\n    \"        print(\\\"Starting again with 'ignore_nonlocal_brokers=True'\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6404c272\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/004_Test_Utils.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cc959176\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _testing.test_utils\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a39bc80a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import hashlib\\n\",\n    \"import platform\\n\",\n    \"import shlex\\n\",\n    \"import signal\\n\",\n    \"import subprocess  # nosec\\n\",\n    \"import unittest\\n\",\n    \"import unittest.mock\\n\",\n    \"from contextlib import contextmanager\\n\",\n    \"from pathlib import Path\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"from IPython.display import IFrame\\n\",\n    \"\\n\",\n    \"from fastkafka._application.app import FastKafka\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._components.helpers import _import_from_string, change_dir\\n\",\n    \"from fastkafka._components.logger import get_logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"911a1ccc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import time\\n\",\n    \"from inspect import signature\\n\",\n    \"\\n\",\n    \"import anyio\\n\",\n    \"import nest_asyncio\\n\",\n    \"import pytest\\n\",\n    \"from nbdev_mkdocs.docstring import run_examples_from_docstring\\n\",\n    \"from tqdm.notebook import tqdm, trange\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka._helpers import consumes_messages, produce_messages\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import ApacheKafkaBroker\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"code\",\n   \"execution_count\": null,\n   \"id\": \"7c64116e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a2eb08b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1f3eee37\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"48f69103\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def nb_safe_seed(s: str) -> Callable[[int], int]:\\n\",\n    \"    \\\"\\\"\\\"Gets a unique seed function for a notebook\\n\",\n    \"\\n\",\n    \"    Params:\\n\",\n    \"        s: name of the notebook used to initialize the seed function\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A unique seed function\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    init_seed = int(hashlib.sha256(s.encode(\\\"utf-8\\\")).hexdigest(), 16) % (10**8)\\n\",\n    \"\\n\",\n    \"    def _get_seed(x: int = 0, *, init_seed: int = init_seed) -> int:\\n\",\n    \"        return init_seed + x\\n\",\n    \"\\n\",\n    \"    return _get_seed\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dfba6520\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"seed = 
nb_safe_seed(\\\"999_test_utils\\\")\\n\",\n    \"\\n\",\n    \"assert seed() == seed(0)\\n\",\n    \"assert seed() + 1 == seed(1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9b04d73d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@contextmanager\\n\",\n    \"def mock_AIOKafkaProducer_send() -> Generator[unittest.mock.Mock, None, None]:\\n\",\n    \"    \\\"\\\"\\\"Mocks **send** method of **AIOKafkaProducer**\\\"\\\"\\\"\\n\",\n    \"    with unittest.mock.patch(\\\"__main__.AIOKafkaProducer.send\\\") as mock:\\n\",\n    \"\\n\",\n    \"        async def _f() -> None:\\n\",\n    \"            pass\\n\",\n    \"\\n\",\n    \"        mock.return_value = asyncio.create_task(_f())\\n\",\n    \"\\n\",\n    \"        yield mock\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"85420005\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def run_script_and_cancel(\\n\",\n    \"    script: str,\\n\",\n    \"    *,\\n\",\n    \"    script_file: Optional[str] = None,\\n\",\n    \"    cmd: Optional[str] = None,\\n\",\n    \"    cancel_after: int = 10,\\n\",\n    \"    app_name: str = \\\"app\\\",\\n\",\n    \"    kafka_app_name: str = \\\"kafka_app\\\",\\n\",\n    \"    generate_docs: bool = False,\\n\",\n    \") -> Tuple[int, bytes]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Runs a script and cancels it after a predefined time.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        script: A python source code to be executed in a separate subprocess.\\n\",\n    \"        script_file: Name of the script where script source will be saved.\\n\",\n    \"        cmd: Command to execute. 
If None, it will be set to 'python3 -m {Path(script_file).stem}'.\\n\",\n    \"        cancel_after: Number of seconds before sending SIGTERM signal.\\n\",\n    \"        app_name: Name of the app.\\n\",\n    \"        kafka_app_name: Name of the Kafka app.\\n\",\n    \"        generate_docs: Flag indicating whether to generate docs.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A tuple containing the exit code and combined stdout and stderr as a binary string.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if script_file is None:\\n\",\n    \"        script_file = \\\"script.py\\\"\\n\",\n    \"\\n\",\n    \"    if cmd is None:\\n\",\n    \"        cmd = f\\\"python3 -m {Path(script_file).stem}\\\"\\n\",\n    \"\\n\",\n    \"    with TemporaryDirectory() as d:\\n\",\n    \"        consumer_script = Path(d) / script_file\\n\",\n    \"\\n\",\n    \"        with open(consumer_script, \\\"w\\\") as file:\\n\",\n    \"            file.write(script)\\n\",\n    \"\\n\",\n    \"        if generate_docs:\\n\",\n    \"            logger.info(\\n\",\n    \"                f\\\"Generating docs for: {Path(script_file).stem}:{kafka_app_name}\\\"\\n\",\n    \"            )\\n\",\n    \"            try:\\n\",\n    \"                kafka_app: FastKafka = _import_from_string(\\n\",\n    \"                    f\\\"{Path(script_file).stem}:{kafka_app_name}\\\"\\n\",\n    \"                )\\n\",\n    \"                await asyncer.asyncify(kafka_app.create_docs)()\\n\",\n    \"            except Exception as e:\\n\",\n    \"                logger.warning(\\n\",\n    \"                    f\\\"Generating docs failed for: {Path(script_file).stem}:{kafka_app_name}, ignoring it for now.\\\"\\n\",\n    \"                )\\n\",\n    \"\\n\",\n    \"        creationflags = 0 if platform.system() != \\\"Windows\\\" else subprocess.CREATE_NEW_PROCESS_GROUP # type: ignore\\n\",\n    \"        proc = subprocess.Popen(\\n\",\n    \"            shlex.split(cmd),\\n\",\n    \"   
         stdout=subprocess.PIPE,\\n\",\n    \"            stderr=subprocess.STDOUT,\\n\",\n    \"            cwd=d,\\n\",\n    \"            shell=True  # nosec: [B602:subprocess_without_shell_equals_true] subprocess call - check for execution of untrusted input.\\n\",\n    \"            if platform.system() == \\\"Windows\\\"\\n\",\n    \"            else False,\\n\",\n    \"            creationflags=creationflags,\\n\",\n    \"        )\\n\",\n    \"        await asyncio.sleep(cancel_after)\\n\",\n    \"        if platform.system() == \\\"Windows\\\":\\n\",\n    \"            proc.send_signal(signal.CTRL_BREAK_EVENT) # type: ignore\\n\",\n    \"        else:\\n\",\n    \"            proc.terminate()\\n\",\n    \"        output, _ = proc.communicate()\\n\",\n    \"\\n\",\n    \"        return (proc.returncode, output)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"09054da6\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check exit code 0\\n\",\n    \"script = \\\"\\\"\\\"\\n\",\n    \"from time import sleep\\n\",\n    \"print(\\\"hello\\\")\\n\",\n    \"sleep({t})\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"exit_code, output = await run_script_and_cancel(script.format(t=0), cancel_after=2)\\n\",\n    \"assert exit_code == 0, f\\\"{exit_code=}, {output=}\\\"\\n\",\n    \"assert output.decode(\\\"utf-8\\\").strip() == \\\"hello\\\", output.decode(\\\"utf-8\\\")\\n\",\n    \"\\n\",\n    \"exit_code, output = await run_script_and_cancel(script.format(t=5), cancel_after=2)\\n\",\n    \"if platform.system() == \\\"Windows\\\":\\n\",\n    \"    assert exit_code == 3221225786, f\\\"{exit_code=}, {output=}\\\"\\n\",\n    \"else:\\n\",\n    \"    assert exit_code < 0, f\\\"{exit_code=}, {output=}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0c8484fa\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check exit code 
1\\n\",\n    \"script = \\\"exit(1)\\\"\\n\",\n    \"\\n\",\n    \"exit_code, output = await run_script_and_cancel(script, cancel_after=1)\\n\",\n    \"\\n\",\n    \"assert exit_code == 1\\n\",\n    \"assert output.decode(\\\"utf-8\\\") == \\\"\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"40aaf329\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check exit code 0 and output to stdout and stderr\\n\",\n    \"script = \\\"\\\"\\\"\\n\",\n    \"import sys\\n\",\n    \"sys.stderr.write(\\\"hello from stderr\\\\\\\\n\\\")\\n\",\n    \"sys.stderr.flush()\\n\",\n    \"print(\\\"hello, exiting with exit code 0\\\")\\n\",\n    \"exit(0)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"exit_code, output = await run_script_and_cancel(script, cancel_after=1)\\n\",\n    \"\\n\",\n    \"line_separator = \\\"\\\\r\\\\n\\\" if platform.system() == \\\"Windows\\\" else \\\"\\\\n\\\"\\n\",\n    \"\\n\",\n    \"assert exit_code == 0, exit_code\\n\",\n    \"assert (\\n\",\n    \"    output.decode(\\\"utf-8\\\") == f\\\"hello from stderr{line_separator}hello, exiting with exit code 0{line_separator}\\\"\\n\",\n    \"), output.decode(\\\"utf-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b346af63\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Check random exit code and output\\n\",\n    \"script = \\\"\\\"\\\"\\n\",\n    \"print(\\\"hello\\\\\\\\nexiting with exit code 143\\\")\\n\",\n    \"exit(143)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"exit_code, output = await run_script_and_cancel(script, cancel_after=1)\\n\",\n    \"\\n\",\n    \"line_separator = \\\"\\\\r\\\\n\\\" if platform.system() == \\\"Windows\\\" else \\\"\\\\n\\\"\\n\",\n    \"\\n\",\n    \"assert exit_code == 143\\n\",\n    
\"assert output.decode(\\\"utf-8\\\") == f\\\"hello{line_separator}exiting with exit code 143{line_separator}\\\"\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7ef6219a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def display_docs(docs_path: str, port: int = 4000) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Serves the documentation using an HTTP server.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        docs_path: Path to the documentation.\\n\",\n    \"        port: Port number for the HTTP server. Defaults to 4000.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    with change_dir(docs_path):\\n\",\n    \"        process = await asyncio.create_subprocess_exec(\\n\",\n    \"            \\\"python3\\\",\\n\",\n    \"            \\\"-m\\\",\\n\",\n    \"            \\\"http.server\\\",\\n\",\n    \"            f\\\"{port}\\\",\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        )\\n\",\n    \"        try:\\n\",\n    \"            from google.colab.output import eval_js\\n\",\n    \"\\n\",\n    \"            proxy = eval_js(f\\\"google.colab.kernel.proxyPort({port})\\\")\\n\",\n    \"            logger.info(\\\"Google colab detected! 
Proxy adjusted.\\\")\\n\",\n    \"        except:\\n\",\n    \"            proxy = f\\\"http://localhost:{port}\\\"\\n\",\n    \"        finally:\\n\",\n    \"            await asyncio.sleep(2)\\n\",\n    \"            display(IFrame(f\\\"{proxy}\\\", 1000, 700))  # type: ignore\\n\",\n    \"            await asyncio.sleep(2)\\n\",\n    \"            await terminate_asyncio_process(process)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5ebe5474\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"example_html = \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    \\n\",\n    \"        Example\\n\",\n    \"    \\n\",\n    \"    \\n\",\n    \"        This is an example of a simple HTML page with one paragraph.\\n\",\n    \"    \\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as tmp:\\n\",\n    \"    with change_dir(tmp):\\n\",\n    \"        with open(Path(tmp) / \\\"index.html\\\", \\\"w\\\") as index_file:\\n\",\n    \"            index_file.write(example_html)\\n\",\n    \"        await display_docs(docs_path=tmp, port=4000)\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/005_Application_executors_export.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"90b702dd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp executors\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"94e92657\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from fastkafka._components.meta import export\\n\",\n    \"from fastkafka._components.task_streaming import SequentialExecutor, DynamicTaskExecutor\\n\",\n    \"\\n\",\n    \"__all__ = [\\\"SequentialExecutor\\\", \\\"DynamicTaskExecutor\\\"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"253f85c7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"_dummy\\\")\\n\",\n    \"def dummy() -> None:\\n\",\n    \"    pass\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/006_TaskStreaming.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2934706f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.task_streaming\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"66b86144\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export \\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import sys\\n\",\n    \"from abc import ABC, abstractmethod\\n\",\n    \"\\n\",\n    \"from asyncio import Task\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import anyio\\n\",\n    \"from aiokafka import ConsumerRecord\\n\",\n    \"\\n\",\n    \"from logging import Logger\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import export\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"777beaa2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from datetime import datetime, timedelta\\n\",\n    \"\\n\",\n    \"from anyio import create_task_group, create_memory_object_stream, ExceptionGroup\\n\",\n    \"from unittest.mock import Mock, MagicMock, AsyncMock\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"import pytest\\n\",\n    \"from aiokafka import ConsumerRecord, TopicPartition\\n\",\n    \"from pydantic import BaseModel, Field, HttpUrl, NonNegativeInt\\n\",\n    \"from tqdm.notebook import tqdm\\n\",\n    \"from types import CoroutineType\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bb6e2212\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"11c31bb0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1c8da6a8\",\n   \"metadata\": {},\n   \"source\": [\n    \"## anyio stream is not running tasks in parallel\\n\",\n    \"> Memory object stream is buffering the messages but the messages are consumed one by one and a new one is consumed only after the last one is finished\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"80816012\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"9285928200db4a55b0124f898b0e4984\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"  0%|          | 0/10 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"bd84a793afbc42f4ab61ecd78e211c02\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"  0%|          | 0/5 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"num_msgs = 5\\n\",\n    \"latency = 0.2\\n\",\n    \"\\n\",\n    \"receive_pbar = tqdm(total=num_msgs*2)\\n\",\n    \"\\n\",\n    \"async def latency_task():\\n\",\n    \"    receive_pbar.update(1)\\n\",\n    \"    await asyncio.sleep(latency)\\n\",\n    \"    receive_pbar.update(1)\\n\",\n    \"\\n\",\n    \"async def process_message_callback(\\n\",\n    \"        receive_stream,\\n\",\n    \") 
-> None:\\n\",\n    \"    async with receive_stream:\\n\",\n    \"        async for task in receive_stream:\\n\",\n    \"            await task\\n\",\n    \"\\n\",\n    \"send_stream, receive_stream = anyio.create_memory_object_stream(\\n\",\n    \"    max_buffer_size=num_msgs\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"t0 = datetime.now()\\n\",\n    \"async with anyio.create_task_group() as tg:\\n\",\n    \"    tg.start_soon(process_message_callback, receive_stream)\\n\",\n    \"    async with send_stream:\\n\",\n    \"        for i in tqdm(range(num_msgs)):\\n\",\n    \"            await send_stream.send(latency_task())\\n\",\n    \"            \\n\",\n    \"assert datetime.now() - t0 >= timedelta(seconds=latency*num_msgs)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b7a41e0a\",\n   \"metadata\": {},\n   \"source\": [\n    \"To solve this, we can create tasks from coroutines and let them run in background while the receive_stream is spawning new tasks whithout being blocked by previous ones.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"842893fa\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"b686c64cc60d4a31b12d34ad80a213a8\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"  0%|          | 0/20000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"a6a76d2fb99644238ac475883f1ad197\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"  0%|          | 0/10000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": 
\"stream\",\n     \"text\": [\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"num_msgs = 10_000\\n\",\n    \"latency = 4.0\\n\",\n    \"\\n\",\n    \"receive_pbar = tqdm(total=num_msgs*2)\\n\",\n    \"\\n\",\n    \"async def latency_task():\\n\",\n    \"    receive_pbar.update(1)\\n\",\n    \"    await asyncio.sleep(latency)\\n\",\n    \"    receive_pbar.update(1)\\n\",\n    \"\\n\",\n    \"tasks = set()\\n\",\n    \"\\n\",\n    \"async def process_message_callback(\\n\",\n    \"        receive_stream,\\n\",\n    \") -> None:\\n\",\n    \"    async with receive_stream:\\n\",\n    \"        async for f in receive_stream:\\n\",\n    \"            task: asyncio.Task = asyncio.create_task(f())\\n\",\n    \"            tasks.add(task)\\n\",\n    \"            task.add_done_callback(lambda task=task, tasks=tasks: tasks.remove(task))\\n\",\n    \"\\n\",\n    \"send_stream, receive_stream = anyio.create_memory_object_stream(\\n\",\n    \"    max_buffer_size=num_msgs\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"t0 = datetime.now()\\n\",\n    \"async with anyio.create_task_group() as tg:\\n\",\n    \"    tg.start_soon(process_message_callback, receive_stream)\\n\",\n    \"    async with send_stream:\\n\",\n    \"        for i in tqdm(range(num_msgs)):\\n\",\n    \"            await send_stream.send(latency_task)\\n\",\n    \"\\n\",\n    \"await asyncio.sleep(latency/2)\\n\",\n    \"receive_pbar.refresh()\\n\",\n    \"assert receive_pbar.n == num_msgs, receive_pbar.n\\n\",\n    \"\\n\",\n    \"while len(tasks) > 0:\\n\",\n    \"    await asyncio.sleep(0)\\n\",\n    \"await send_stream.aclose()\\n\",\n    \"    \\n\",\n    \"receive_pbar.close()\\n\",\n    \"assert datetime.now() - t0 <= timedelta(seconds=latency+5.0)\\n\",\n    \"assert receive_pbar.n == num_msgs*2, receive_pbar.n\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"839cceba\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Keeping 
track of tasks\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4b255e4f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TaskPool:\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        size: int = 100_000,\\n\",\n    \"        on_error: Optional[Callable[[BaseException], None]] = None,\\n\",\n    \"    ):\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initializes a TaskPool instance.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            size: The size of the task pool. Defaults to 100,000.\\n\",\n    \"            on_error: Optional callback function to handle task errors. Defaults to None.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.size = size\\n\",\n    \"        self.pool: Set[Task] = set()\\n\",\n    \"        self.on_error = on_error\\n\",\n    \"        self.finished = False\\n\",\n    \"\\n\",\n    \"    async def add(self, item: Task) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Adds a task to the task pool.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            item: The task to be added.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        while len(self.pool) >= self.size:\\n\",\n    \"            await asyncio.sleep(0)\\n\",\n    \"        self.pool.add(item)\\n\",\n    \"        item.add_done_callback(self.discard)\\n\",\n    \"\\n\",\n    \"    def discard(self, task: Task) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Discards a completed task from the task pool.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            task: The completed task to be discarded.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            None\\n\",\n    \"        
\\\"\\\"\\\"\\n\",\n    \"        e = task.exception()\\n\",\n    \"        if e is not None and self.on_error is not None:\\n\",\n    \"            try:\\n\",\n    \"                self.on_error(e)\\n\",\n    \"            except Exception as ee:\\n\",\n    \"                logger.warning(\\n\",\n    \"                    f\\\"Exception {ee} raised when calling on_error() callback: {e}\\\"\\n\",\n    \"                )\\n\",\n    \"\\n\",\n    \"        self.pool.discard(task)\\n\",\n    \"\\n\",\n    \"    def __len__(self) -> int:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Returns the number of tasks in the task pool.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The number of tasks in the task pool.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return len(self.pool)\\n\",\n    \"\\n\",\n    \"    async def __aenter__(self) -> \\\"TaskPool\\\":\\n\",\n    \"        self.finished = False\\n\",\n    \"        return self\\n\",\n    \"\\n\",\n    \"    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\\n\",\n    \"        while len(self) > 0:\\n\",\n    \"            await asyncio.sleep(0)\\n\",\n    \"        self.finished = True\\n\",\n    \"\\n\",\n    \"    @staticmethod\\n\",\n    \"    def log_error(logger: Logger) -> Callable[[Exception], None]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Creates a decorator that logs errors using the specified logger.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            logger: The logger to use for error logging.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The decorator function.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        def _log_error(e: Exception, logger: Logger = logger) -> None:\\n\",\n    \"            logger.warning(f\\\"{e=}\\\")\\n\",\n    \"\\n\",\n    \"        return _log_error\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"ce06ffff\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"async with TaskPool() as tp:\\n\",\n    \"    pass\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b0625abf\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"async def f():\\n\",\n    \"    await asyncio.sleep(2)\\n\",\n    \"\\n\",\n    \"pool = TaskPool()\\n\",\n    \"assert len(pool) == 0\\n\",\n    \"\\n\",\n    \"async with pool:\\n\",\n    \"    task = asyncio.create_task(f())\\n\",\n    \"    await pool.add(task)\\n\",\n    \"    assert len(pool) == 1\\n\",\n    \"\\n\",\n    \"assert len(pool) == 0, len(pool)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f931de1e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[WARNING] __main__: e=RuntimeError('funny error')\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async def f():\\n\",\n    \"    raise RuntimeError(\\\"funny error\\\")\\n\",\n    \"\\n\",\n    \"        \\n\",\n    \"    return _log_error\\n\",\n    \"    \\n\",\n    \"pool = TaskPool(on_error=TaskPool.log_error(logger))\\n\",\n    \"\\n\",\n    \"async with pool:\\n\",\n    \"    task = asyncio.create_task(f())\\n\",\n    \"    await pool.add(task)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6cd66e07\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"class ExceptionMonitor:\\n\",\n    \"    def __init__(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initializes an ExceptionMonitor instance.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.exceptions: List[Exception] = []\\n\",\n    \"        self.exception_found = False\\n\",\n    \"\\n\",\n    \"    
def on_error(self, e: Exception) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Handles an error by storing the exception.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            e: The exception to be handled.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.exceptions.append(e)\\n\",\n    \"        self.exception_found = True\\n\",\n    \"\\n\",\n    \"    def _monitor_step(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Raises the next exception in the queue.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        if len(self.exceptions) > 0:\\n\",\n    \"            e = self.exceptions.pop(0)\\n\",\n    \"            raise e\\n\",\n    \"\\n\",\n    \"    async def __aenter__(self) -> \\\"ExceptionMonitor\\\":\\n\",\n    \"        return self\\n\",\n    \"\\n\",\n    \"    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:\\n\",\n    \"        while len(self.exceptions) > 0:\\n\",\n    \"            self._monitor_step()\\n\",\n    \"            await asyncio.sleep(0)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"16135fe2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"e=<ExceptionInfo RuntimeError('very funny error.') tblen=4>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"no_tasks = 1\\n\",\n    \"\\n\",\n    \"async def f():\\n\",\n    \"    raise RuntimeError(f\\\"very funny error.\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"exception_monitor = ExceptionMonitor()\\n\",\n    \"pool = TaskPool(on_error=exception_monitor.on_error)\\n\",\n    \"\\n\",\n    \"async def create_tasks():\\n\",\n    \"    for _ in range(no_tasks):\\n\",\n    \"        task = asyncio.create_task(f())\\n\",\n    \"        await 
pool.add(task)\\n\",\n    \"        await asyncio.sleep(0.1) # otherwise the tasks get created before any of them throws an exception\\n\",\n    \"        if exception_monitor.exception_found:\\n\",\n    \"            break\\n\",\n    \"        \\n\",\n    \"with pytest.raises(RuntimeError) as e:\\n\",\n    \"    async with exception_monitor, pool:\\n\",\n    \"        async with asyncer.create_task_group() as tg:\\n\",\n    \"            tg.soonify(create_tasks)()\\n\",\n    \"            \\n\",\n    \"print(f\\\"{e=}\\\")\\n\",\n    \"assert exception_monitor.exceptions == [], len(exception_monitor.exceptions)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d46e1354\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Streaming\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d56d47ec\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class StreamExecutor(ABC):\\n\",\n    \"    @abstractmethod\\n\",\n    \"    async def run(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        *,\\n\",\n    \"        is_shutting_down_f: Callable[[], bool],\\n\",\n    \"        generator: Callable[[], Awaitable[ConsumerRecord]],\\n\",\n    \"        processor: Callable[[ConsumerRecord], Awaitable[None]],\\n\",\n    \"    ) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Abstract method for running the stream executor.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            is_shutting_down_f: Function to check if the executor is shutting down.\\n\",\n    \"            generator: Generator function for retrieving consumer records.\\n\",\n    \"            processor: Processor function for processing consumer records.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        pass\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"61503ef7\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Streaming 
tasks\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8ac5c907\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"mock = Mock()\\n\",\n    \"async_mock = asyncer.asyncify(mock)\\n\",\n    \"\\n\",\n    \"async def process_items(receive_stream):\\n\",\n    \"    async with receive_stream:\\n\",\n    \"        async for item in receive_stream:\\n\",\n    \"            task = asyncio.create_task(async_mock(item))\\n\",\n    \"            await pool.add(task)\\n\",\n    \"\\n\",\n    \"send_stream, receive_stream = create_memory_object_stream()\\n\",\n    \"pool = TaskPool()\\n\",\n    \"\\n\",\n    \"async with pool:\\n\",\n    \"    async with create_task_group() as tg:\\n\",\n    \"        tg.start_soon(process_items, receive_stream)\\n\",\n    \"        async with send_stream:\\n\",\n    \"            await send_stream.send(f\\\"hi\\\")\\n\",\n    \"\\n\",\n    \"mock.assert_called()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8a0b5bd2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _process_items_task(  # type: ignore\\n\",\n    \"    processor: Callable[[ConsumerRecord], Awaitable[None]], task_pool: TaskPool\\n\",\n    \") -> Callable[\\n\",\n    \"    [\\n\",\n    \"        anyio.streams.memory.MemoryObjectReceiveStream,\\n\",\n    \"        Callable[[ConsumerRecord], Awaitable[None]],\\n\",\n    \"        bool,\\n\",\n    \"    ],\\n\",\n    \"    Coroutine[Any, Any, Awaitable[None]],\\n\",\n    \"]:\\n\",\n    \"    async def _process_items_wrapper(  # type: ignore\\n\",\n    \"        receive_stream: anyio.streams.memory.MemoryObjectReceiveStream,\\n\",\n    \"        processor: Callable[[ConsumerRecord], Awaitable[None]] = processor,\\n\",\n    \"        task_pool=task_pool,\\n\",\n    \"    ):\\n\",\n    \"        async with receive_stream:\\n\",\n    \"            
async for msg in receive_stream:\\n\",\n    \"                task: asyncio.Task = asyncio.create_task(processor(msg))  # type: ignore\\n\",\n    \"                await task_pool.add(task)\\n\",\n    \"\\n\",\n    \"    return _process_items_wrapper\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2711f803\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.executors\\\")\\n\",\n    \"class DynamicTaskExecutor(StreamExecutor):\\n\",\n    \"    \\\"\\\"\\\"A class that implements a dynamic task executor for processing consumer records.\\n\",\n    \"\\n\",\n    \"    The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\\n\",\n    \"    for running tasks in parallel using asyncio.Task.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        throw_exceptions: bool = False,\\n\",\n    \"        max_buffer_size: int = 100_000,\\n\",\n    \"        size: int = 100_000,\\n\",\n    \"    ):\\n\",\n    \"        \\\"\\\"\\\"Create an instance of DynamicTaskExecutor\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            throw_exceptions: Flag indicating whether exceptions should be thrown or logged.\\n\",\n    \"                Defaults to False.\\n\",\n    \"            max_buffer_size: Maximum buffer size for the memory object stream.\\n\",\n    \"                Defaults to 100_000.\\n\",\n    \"            size: Size of the task pool. 
Defaults to 100_000.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.throw_exceptions = throw_exceptions\\n\",\n    \"        self.max_buffer_size = max_buffer_size\\n\",\n    \"        self.exception_monitor = ExceptionMonitor()\\n\",\n    \"        self.task_pool = TaskPool(\\n\",\n    \"            on_error=self.exception_monitor.on_error  # type: ignore\\n\",\n    \"            if throw_exceptions\\n\",\n    \"            else TaskPool.log_error(logger),\\n\",\n    \"            size=size,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    async def run(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        *,\\n\",\n    \"        is_shutting_down_f: Callable[[], bool],\\n\",\n    \"        generator: Callable[[], Awaitable[ConsumerRecord]],\\n\",\n    \"        processor: Callable[[ConsumerRecord], Awaitable[None]],\\n\",\n    \"    ) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Runs the dynamic task executor.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            is_shutting_down_f: Function to check if the executor is shutting down.\\n\",\n    \"            generator: Generator function for retrieving consumer records.\\n\",\n    \"            processor: Processor function for processing consumer records.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        send_stream, receive_stream = anyio.create_memory_object_stream(\\n\",\n    \"            max_buffer_size=self.max_buffer_size\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        async with self.exception_monitor, self.task_pool:\\n\",\n    \"            async with anyio.create_task_group() as tg:\\n\",\n    \"                tg.start_soon(\\n\",\n    \"                    _process_items_task(processor, self.task_pool), receive_stream\\n\",\n    \"                )\\n\",\n    \"                async with send_stream:\\n\",\n    \"                    while not is_shutting_down_f():\\n\",\n    \"                        if (\\n\",\n    \"         
                   self.exception_monitor.exception_found\\n\",\n    \"                            and self.throw_exceptions\\n\",\n    \"                        ):\\n\",\n    \"                            break\\n\",\n    \"                        msgs = await generator()\\n\",\n    \"                        for msg in msgs:\\n\",\n    \"                            await send_stream.send(msg)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7bc13156\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def is_shutting_down_f(call_count:int = 1) -> Callable[[], bool]:\\n\",\n    \"    count = {\\\"count\\\": 0}\\n\",\n    \"    \\n\",\n    \"    def _is_shutting_down_f(count=count, call_count:int = call_count):\\n\",\n    \"        if count[\\\"count\\\"]>=call_count:\\n\",\n    \"            return True\\n\",\n    \"        else:\\n\",\n    \"            count[\\\"count\\\"] = count[\\\"count\\\"] + 1\\n\",\n    \"            return False\\n\",\n    \"        \\n\",\n    \"    return _is_shutting_down_f\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0385d5ee\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"f = is_shutting_down_f()\\n\",\n    \"assert f() == False\\n\",\n    \"assert f() == True\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c65ce4ab\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"msg\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async def produce():\\n\",\n    \"    return [\\\"msg\\\"]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def consume(msg):\\n\",\n    \"    print(msg)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"stream = DynamicTaskExecutor()\\n\",\n    \"\\n\",\n    \"await stream.run(\\n\",\n    \"    is_shutting_down_f=is_shutting_down_f(),\\n\",\n    \"    
generator=produce,\\n\",\n    \"    processor=consume,\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"46d82173\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"mock_produce = AsyncMock(spec=CoroutineType, return_value=[\\\"msg\\\"])\\n\",\n    \"mock_consume = AsyncMock(spec=CoroutineType)\\n\",\n    \"\\n\",\n    \"stream = DynamicTaskExecutor()\\n\",\n    \"\\n\",\n    \"await stream.run(\\n\",\n    \"    is_shutting_down_f=is_shutting_down_f(),\\n\",\n    \"    generator=mock_produce,\\n\",\n    \"    processor=mock_consume,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"mock_produce.assert_awaited()\\n\",\n    \"mock_consume.assert_awaited_with(\\\"msg\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7494a22c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"mock_produce = AsyncMock(spec=CoroutineType, return_value=[\\\"msg\\\"])\\n\",\n    \"mock_consume = AsyncMock(spec=CoroutineType)\\n\",\n    \"\\n\",\n    \"stream = DynamicTaskExecutor()\\n\",\n    \"\\n\",\n    \"await stream.run(\\n\",\n    \"    is_shutting_down_f=is_shutting_down_f(),\\n\",\n    \"    generator=mock_produce,\\n\",\n    \"    processor=mock_consume,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"mock_produce.assert_called()\\n\",\n    \"mock_consume.assert_called_with(\\\"msg\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"128ac8f8\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"num_msgs = 13\\n\",\n    \"\\n\",\n    \"mock_produce = AsyncMock(spec=CoroutineType, return_value=[\\\"msg\\\"])\\n\",\n    \"mock_consume = AsyncMock(spec=CoroutineType)\\n\",\n    \"mock_consume.side_effect = RuntimeError()\\n\",\n    \"\\n\",\n    \"stream = DynamicTaskExecutor(throw_exceptions=True)\\n\",\n    \"\\n\",\n    \"with pytest.raises(RuntimeError) as e:\\n\",\n    \"    await 
stream.run(\\n\",\n    \"        is_shutting_down_f=is_shutting_down_f(num_msgs),\\n\",\n    \"        generator=mock_produce,\\n\",\n    \"        processor=mock_consume,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"mock_produce.assert_called()\\n\",\n    \"mock_consume.assert_awaited_with(\\\"msg\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fd855e8a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\",\n      \"[WARNING] __main__: e=RuntimeError()\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"num_msgs = 13\\n\",\n    \"\\n\",\n    \"mock_produce = AsyncMock(spec=CoroutineType, return_value=[\\\"msg\\\"])\\n\",\n    \"mock_consume = AsyncMock(spec=CoroutineType)\\n\",\n    \"mock_consume.side_effect = RuntimeError()\\n\",\n    \"\\n\",\n    \"stream = DynamicTaskExecutor()\\n\",\n    \"\\n\",\n    \"await stream.run(\\n\",\n    \"    is_shutting_down_f=is_shutting_down_f(num_msgs),\\n\",\n    \"    generator=mock_produce,\\n\",\n    \"    processor=mock_consume,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"mock_produce.assert_called()\\n\",\n    \"mock_consume.assert_awaited_with(\\\"msg\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"276db314\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 
Awaiting coroutines\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d595bbb3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _process_items_coro(  # type: ignore\\n\",\n    \"    processor: Callable[[ConsumerRecord], Awaitable[None]],\\n\",\n    \"    throw_exceptions: bool,\\n\",\n    \") -> Callable[\\n\",\n    \"    [\\n\",\n    \"        anyio.streams.memory.MemoryObjectReceiveStream,\\n\",\n    \"        Callable[[ConsumerRecord], Awaitable[None]],\\n\",\n    \"        bool,\\n\",\n    \"    ],\\n\",\n    \"    Coroutine[Any, Any, Awaitable[None]],\\n\",\n    \"]:\\n\",\n    \"    async def _process_items_wrapper(  # type: ignore\\n\",\n    \"        receive_stream: anyio.streams.memory.MemoryObjectReceiveStream,\\n\",\n    \"        processor: Callable[[ConsumerRecord], Awaitable[None]] = processor,\\n\",\n    \"        throw_exceptions: bool = throw_exceptions,\\n\",\n    \"    ) -> Awaitable[None]:\\n\",\n    \"        async with receive_stream:\\n\",\n    \"            async for msg in receive_stream:\\n\",\n    \"                try:\\n\",\n    \"                    await processor(msg)\\n\",\n    \"                except Exception as e:\\n\",\n    \"                    if throw_exceptions:\\n\",\n    \"                        raise e\\n\",\n    \"                    else:\\n\",\n    \"                        logger.warning(f\\\"{e=}\\\")\\n\",\n    \"\\n\",\n    \"    return _process_items_wrapper\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2a81d03b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.executors\\\")\\n\",\n    \"class SequentialExecutor(StreamExecutor):\\n\",\n    \"    \\\"\\\"\\\"A class that implements a sequential executor for processing consumer records.\\n\",\n    
\"\\n\",\n    \"    The SequentialExecutor class extends the StreamExecutor class and provides functionality\\n\",\n    \"    for running processing tasks in sequence by awaiting their coroutines.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        throw_exceptions: bool = False,\\n\",\n    \"        max_buffer_size: int = 100_000,\\n\",\n    \"    ):\\n\",\n    \"        \\\"\\\"\\\"Create an instance of SequentialExecutor\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            throw_exceptions: Flag indicating whether exceptions should be thrown or logged.\\n\",\n    \"                Defaults to False.\\n\",\n    \"            max_buffer_size: Maximum buffer size for the memory object stream.\\n\",\n    \"                Defaults to 100_000.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.throw_exceptions = throw_exceptions\\n\",\n    \"        self.max_buffer_size = max_buffer_size\\n\",\n    \"\\n\",\n    \"    async def run(  # type: ignore\\n\",\n    \"        self,\\n\",\n    \"        *,\\n\",\n    \"        is_shutting_down_f: Callable[[], bool],\\n\",\n    \"        generator: Callable[[], Awaitable[ConsumerRecord]],\\n\",\n    \"        processor: Callable[[ConsumerRecord], Awaitable[None]],\\n\",\n    \"    ) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Runs the sequential executor.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            is_shutting_down_f: Function to check if the executor is shutting down.\\n\",\n    \"            generator: Generator function for retrieving consumer records.\\n\",\n    \"            processor: Processor function for processing consumer records.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        send_stream, receive_stream = anyio.create_memory_object_stream(\\n\",\n    \"            max_buffer_size=self.max_buffer_size\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        
async with anyio.create_task_group() as tg:\\n\",\n    \"            tg.start_soon(\\n\",\n    \"                _process_items_coro(processor, self.throw_exceptions), receive_stream\\n\",\n    \"            )\\n\",\n    \"            async with send_stream:\\n\",\n    \"                while not is_shutting_down_f():\\n\",\n    \"                    msgs = await generator()\\n\",\n    \"                    for msg in msgs:\\n\",\n    \"                        await send_stream.send(msg)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9fa058a1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"num_msgs = 13\\n\",\n    \"\\n\",\n    \"mock_produce = AsyncMock(spec=CoroutineType, return_value=[\\\"msg\\\"])\\n\",\n    \"mock_consume = AsyncMock(spec=CoroutineType)\\n\",\n    \"mock_consume.side_effect = RuntimeError(\\\"Funny error\\\")\\n\",\n    \"\\n\",\n    \"stream = SequentialExecutor(throw_exceptions=True)\\n\",\n    \"\\n\",\n    \"with pytest.raises(ExceptionGroup) as e:\\n\",\n    \"    await stream.run(\\n\",\n    \"        is_shutting_down_f=is_shutting_down_f(num_msgs),\\n\",\n    \"        generator=mock_produce,\\n\",\n    \"        processor=mock_consume,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"mock_produce.assert_called()\\n\",\n    \"mock_consume.assert_awaited_with(\\\"msg\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b9fa60fb\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny 
error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\",\n      \"[WARNING] __main__: e=RuntimeError('Funny error')\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"num_msgs = 13\\n\",\n    \"\\n\",\n    \"mock_produce = AsyncMock(spec=CoroutineType, return_value=[\\\"msg\\\"])\\n\",\n    \"mock_consume = AsyncMock(spec=CoroutineType)\\n\",\n    \"mock_consume.side_effect = RuntimeError(\\\"Funny error\\\")\\n\",\n    \"\\n\",\n    \"stream = SequentialExecutor()\\n\",\n    \"\\n\",\n    \"await stream.run(\\n\",\n    \"    is_shutting_down_f=is_shutting_down_f(num_msgs),\\n\",\n    \"    generator=mock_produce,\\n\",\n    \"    processor=mock_consume,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"mock_produce.assert_called()\\n\",\n    \"mock_consume.assert_awaited_with(\\\"msg\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1fc03222\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_executor(executor: Union[str, StreamExecutor, None] = None) -> StreamExecutor:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Returns an instance of the specified executor.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        executor: Executor instance or name of the executor.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Instance of the specified executor.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        AttributeError: If the executor is not found.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if isinstance(executor, StreamExecutor):\\n\",\n    \"        return executor\\n\",\n    \" 
   elif executor is None:\\n\",\n    \"        executor = \\\"SequentialExecutor\\\"\\n\",\n    \"    return getattr(sys.modules[\\\"fastkafka._components.task_streaming\\\"], executor)()  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ab892f59\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"for executor in [None, \\\"SequentialExecutor\\\", SequentialExecutor()]:\\n\",\n    \"    actual = get_executor(executor)\\n\",\n    \"    assert actual.__class__.__qualname__ == \\\"SequentialExecutor\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3bba3526\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"for executor in [\\\"DynamicTaskExecutor\\\", DynamicTaskExecutor()]:\\n\",\n    \"    actual = get_executor(executor)\\n\",\n    \"    assert actual.__class__.__qualname__ == \\\"DynamicTaskExecutor\\\"\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/010_Application_export.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"90b702dd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp __init__\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"94e92657\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"from fastkafka._application.app import FastKafka\\n\",\n    \"from fastkafka._components.meta import export\\n\",\n    \"from fastkafka._components.producer_decorator import KafkaEvent\\n\",\n    \"from fastkafka._components.aiokafka_consumer_loop import EventMetadata\\n\",\n    \"\\n\",\n    \"__all__ = [\\n\",\n    \"    \\\"FastKafka\\\",\\n\",\n    \"    \\\"KafkaEvent\\\",\\n\",\n    \"    \\\"EventMetadata\\\"\\n\",\n    \"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"253f85c7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"_dummy\\\")\\n\",\n    \"def dummy() -> None:\\n\",\n    \"    pass\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/011_ConsumerLoop.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"79dfbe1f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.aiokafka_consumer_loop\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0aaf843a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"/home/kumaran/.local/lib/python3.11/site-packages/pydantic/_internal/_config.py:257: UserWarning: Valid config keys have changed in V2:\\n\",\n      \"* 'json_encoders' has been removed\\n\",\n      \"  warnings.warn(message, UserWarning)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from asyncio import iscoroutinefunction, Task  # do not use the version from inspect\\n\",\n    \"from typing import *\\n\",\n    \"from dataclasses import dataclass\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"from aiokafka.structs import ConsumerRecord\\n\",\n    \"from pydantic import BaseModel\\n\",\n    \"\\n\",\n    \"import fastkafka._aiokafka_imports\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import delegates, export\\n\",\n    \"from fastkafka._components.task_streaming import get_executor, StreamExecutor\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5a446cf5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import asyncio\\n\",\n    \"import pytest\\n\",\n    \"from datetime import datetime, timedelta\\n\",\n    \"from unittest.mock import AsyncMock, MagicMock, Mock, call, patch, create_autospec\\n\",\n    \"\\n\",\n    \"import anyio\\n\",\n    \"from aiokafka.structs import TopicPartition\\n\",\n    \"from pydantic import Field, HttpUrl, NonNegativeInt\\n\",\n    \"from tqdm.notebook import 
tqdm\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import true_after\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka._helpers import produce_messages\\n\",\n    \"from fastkafka.encoder import avro_decoder, avro_encoder, json_decoder\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d0e9e4c9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"53542175\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"af85a823\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"92feb585\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"aeaf0d5f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class MyMessage(BaseModel):\\n\",\n    \"    url: HttpUrl = Field(..., example=\\\"http://www.acme.com\\\", description=\\\"Url example\\\")\\n\",\n    \"    port: NonNegativeInt = Field(1000)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"68cf645d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@dataclass\\n\",\n    \"@export(\\\"fastkafka\\\")\\n\",\n    \"class EventMetadata:\\n\",\n    \"    \\\"\\\"\\\"A class for encapsulating Kafka record metadata.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The topic this record is received from\\n\",\n    \"        partition: The partition from which this record is received\\n\",\n    \"        offset: The position of this record in the corresponding Kafka partition\\n\",\n    \"        timestamp: The timestamp of this record\\n\",\n    \"        timestamp_type: The timestamp type of this record\\n\",\n    \"        key: The key (or `None` if no key is specified)\\n\",\n    \"        value: The value\\n\",\n    \"        serialized_key_size: The size of the serialized, uncompressed key in bytes\\n\",\n    \"        serialized_value_size: The size of the serialized, uncompressed value in bytes\\n\",\n    \"        headers: The headers\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    topic: str\\n\",\n    \"    partition: int\\n\",\n    \"    offset: int\\n\",\n    \"    timestamp: int\\n\",\n    \"    timestamp_type: int\\n\",\n    \"    key: Optional[bytes]\\n\",\n    \"    value: Optional[bytes]\\n\",\n    \"    checksum: int\\n\",\n    \"    serialized_key_size: int\\n\",\n    \"    serialized_value_size: int\\n\",\n    \"    headers: Sequence[Tuple[str, bytes]]\\n\",\n    \"\\n\",\n    \"    @staticmethod\\n\",\n    \"    def create_event_metadata(record: ConsumerRecord) -> \\\"EventMetadata\\\":  # type: ignore\\n\",\n    \"        \\\"\\\"\\\"Creates an instance of EventMetadata from a ConsumerRecord.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            record: The Kafka ConsumerRecord.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The created EventMetadata instance.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n   
 \"        return EventMetadata(\\n\",\n    \"            topic=record.topic,\\n\",\n    \"            partition=record.partition,\\n\",\n    \"            offset=record.offset,\\n\",\n    \"            timestamp=record.timestamp,\\n\",\n    \"            timestamp_type=record.timestamp_type,\\n\",\n    \"            value=record.value,\\n\",\n    \"            checksum=record.checksum,\\n\",\n    \"            key=record.key,\\n\",\n    \"            serialized_key_size=record.serialized_key_size,\\n\",\n    \"            serialized_value_size=record.serialized_value_size,\\n\",\n    \"            headers=record.headers,\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f4e0d145\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def create_consumer_record(topic: str, partition: int, msg: BaseModel):\\n\",\n    \"    record = ConsumerRecord(\\n\",\n    \"        topic=topic,\\n\",\n    \"        partition=partition,\\n\",\n    \"        offset=0,\\n\",\n    \"        timestamp=0,\\n\",\n    \"        timestamp_type=0,\\n\",\n    \"        key=None,\\n\",\n    \"        value=msg.model_dump_json().encode(\\\"utf-8\\\")\\n\",\n    \"        if hasattr(msg, \\\"json\\\")\\n\",\n    \"        else msg.encode(\\\"utf-8\\\"),\\n\",\n    \"        checksum=0,\\n\",\n    \"        serialized_key_size=0,\\n\",\n    \"        serialized_value_size=0,\\n\",\n    \"        headers=[],\\n\",\n    \"    )\\n\",\n    \"    return record\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2bf5ffd7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"meta = EventMetadata.create_event_metadata(create_consumer_record(\\\"topic\\\", 1, MyMessage(url=\\\"http://www.acme.com\\\", port=22)))\\n\",\n    \"assert meta.topic == \\\"topic\\\"\\n\",\n    \"assert meta.partition == 1\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"5bb76fa8\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"AsyncConsume = Callable[[Union[List[BaseModel], BaseModel]], Awaitable[None]]\\n\",\n    \"AsyncConsumeMeta =  Callable[[Union[List[BaseModel], BaseModel], Union[List[EventMetadata], EventMetadata]], Awaitable[None]]\\n\",\n    \"SyncConsume = Callable[[Union[List[BaseModel], BaseModel]], None]\\n\",\n    \"SyncConsumeMeta =  Callable[[Union[List[BaseModel], BaseModel], Union[List[EventMetadata], EventMetadata]], None]\\n\",\n    \"\\n\",\n    \"ConsumeCallable = Union[\\n\",\n    \"    AsyncConsume, AsyncConsumeMeta, SyncConsume, SyncConsumeMeta\\n\",\n    \"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"01f24d56\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _callback_parameters_wrapper(\\n\",\n    \"    callback: Union[AsyncConsume, AsyncConsumeMeta]\\n\",\n    \") -> AsyncConsumeMeta:\\n\",\n    \"    \\\"\\\"\\\"Wraps an async callback and filters the arguments to pass based on if the function accepts EventMetadata as argument\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        callback: async callable that will be wrapped\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Wrapped callback with filtered params\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    async def _params_wrap(\\n\",\n    \"        msg: Union[BaseModel, List[BaseModel]],\\n\",\n    \"        meta: Union[EventMetadata, List[EventMetadata]],\\n\",\n    \"        callback: Union[AsyncConsume, AsyncConsumeMeta] = callback,\\n\",\n    \"    ) -> None:\\n\",\n    \"        types = list(get_type_hints(callback).values())\\n\",\n    \"        args: List[Union[BaseModel, List[BaseModel], EventMetadata, List[EventMetadata]]] = [msg]\\n\",\n    \"        if EventMetadata in types:\\n\",\n    
\"            args.insert(types.index(EventMetadata), meta)\\n\",\n    \"        if List[EventMetadata] in types:\\n\",\n    \"            args.insert(types.index(List[EventMetadata]), meta)\\n\",\n    \"        await callback(*args)  # type: ignore\\n\",\n    \"\\n\",\n    \"    return _params_wrap\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b833bd5b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"async def without_meta(msg: BaseModel):\\n\",\n    \"    assert msg == \\\"Example_msg\\\"\\n\",\n    \"\\n\",\n    \"with pytest.raises(TypeError) as e:\\n\",\n    \"    await without_meta(\\\"Example_msg\\\", \\\"Some_meta\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7ba7b7f1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@_callback_parameters_wrapper\\n\",\n    \"async def without_meta(msg: BaseModel):\\n\",\n    \"    assert msg == \\\"Example_msg\\\"\\n\",\n    \"\\n\",\n    \"await without_meta(\\\"Example_msg\\\", \\\"Some_meta\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"859b390d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@_callback_parameters_wrapper\\n\",\n    \"async def with_meta(msg: BaseModel, meta: EventMetadata):\\n\",\n    \"    assert msg == \\\"Example_msg\\\"\\n\",\n    \"    assert meta == \\\"Some_meta\\\"\\n\",\n    \"\\n\",\n    \"await with_meta(\\\"Example_msg\\\", \\\"Some_meta\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ec76eaf3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@_callback_parameters_wrapper\\n\",\n    \"async def with_meta(msg: List[BaseModel], meta: List[EventMetadata]):\\n\",\n    \"    assert msg == \\\"Example_msg\\\"\\n\",\n    \"    assert meta == \\\"Some_meta\\\"\\n\",\n    \"\\n\",\n    \"await 
with_meta(\\\"Example_msg\\\", \\\"Some_meta\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5fd1a5c7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _prepare_callback(\\n\",\n    \"    callback: ConsumeCallable\\n\",\n    \") -> AsyncConsumeMeta:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Prepares a callback to be used in the consumer loop.\\n\",\n    \"        1. If callback is sync, asyncify it\\n\",\n    \"        2. Wrap the callback into a safe callback for exception handling\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        callback: async callable that will be prepared for use in consumer\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Prepared callback\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    async_callback: Union[AsyncConsume, AsyncConsumeMeta] = (\\n\",\n    \"        callback if iscoroutinefunction(callback) else asyncer.asyncify(callback)  # type: ignore\\n\",\n    \"    )\\n\",\n    \"    return _callback_parameters_wrapper(async_callback)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7e996f4b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check if callback is called when wrapped\\n\",\n    \"\\n\",\n    \"for is_async in [False, True]:\\n\",\n    \"    example_msg = \\\"Example msg\\\"\\n\",\n    \"    callback = AsyncMock() if is_async else Mock()\\n\",\n    \"    prepared_callback = _prepare_callback(callback)\\n\",\n    \"\\n\",\n    \"    with patch(\\\"__main__.get_type_hints\\\") as mock:\\n\",\n    \"        mock.return_value = {\\\"msg\\\": BaseModel}\\n\",\n    \"        await prepared_callback(f\\\"{example_msg}\\\", \\\"Some meta\\\")\\n\",\n    \"\\n\",\n    \"    callback.assert_called_once_with(f\\\"{example_msg}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"9e0977bd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"async def _stream_msgs(  # type: ignore\\n\",\n    \"    msgs: Dict[TopicPartition, bytes],\\n\",\n    \"    send_stream: anyio.streams.memory.MemoryObjectSendStream[Any],\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decodes and streams the message and topic to the send_stream.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        msgs:\\n\",\n    \"        send_stream:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    for topic_partition, topic_msgs in msgs.items():\\n\",\n    \"        topic = topic_partition.topic\\n\",\n    \"        try:\\n\",\n    \"            await send_stream.send(topic_msgs)\\n\",\n    \"        except Exception as e:\\n\",\n    \"            logger.warning(\\n\",\n    \"                f\\\"_stream_msgs(): Unexpected exception '{e.__repr__()}' caught and ignored for topic='{topic_partition.topic}', partition='{topic_partition.partition}' and messages: {topic_msgs!r}\\\"\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _decode_streamed_msgs(  # type: ignore\\n\",\n    \"    msgs: List[ConsumerRecord], msg_type: BaseModel\\n\",\n    \") -> List[BaseModel]:\\n\",\n    \"    decoded_msgs = [msg_type.parse_raw(msg.value.decode(\\\"utf-8\\\")) for msg in msgs]\\n\",\n    \"    return decoded_msgs\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"335aa93b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Sanity check: one msg, one topic\\n\",\n    \"\\n\",\n    \"with patch(\\\"anyio.streams.memory.MemoryObjectSendStream.send\\\") as mock:\\n\",\n    \"    send_stream, receive_stream = anyio.create_memory_object_stream()\\n\",\n    \"\\n\",\n    \"    topic = \\\"topic_0\\\"\\n\",\n    \"    partition = 0\\n\",\n    \"    topic_part_0_0 = TopicPartition(topic, partition)\\n\",\n    \"    msg = MyMessage(url=\\\"http://www.acme.com\\\", 
port=22)\\n\",\n    \"    record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"\\n\",\n    \"    await _stream_msgs(\\n\",\n    \"        msgs={topic_part_0_0: [record]},\\n\",\n    \"        send_stream=send_stream,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    mock.assert_called_once()\\n\",\n    \"    mock.assert_has_calls([call([record])])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f25ecc98\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check different topics\\n\",\n    \"\\n\",\n    \"# Two msg, two topics, send called twice with each topic\\n\",\n    \"\\n\",\n    \"with patch(\\\"anyio.streams.memory.MemoryObjectSendStream.send\\\") as mock:\\n\",\n    \"    send_stream, receive_stream = anyio.create_memory_object_stream()\\n\",\n    \"\\n\",\n    \"    topic_partitions = [(\\\"topic_0\\\", 0), (\\\"topic_1\\\", 0)]\\n\",\n    \"\\n\",\n    \"    msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"    msgs = {\\n\",\n    \"        TopicPartition(topic, partition): [\\n\",\n    \"            create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"        ]\\n\",\n    \"        for topic, partition in topic_partitions\\n\",\n    \"    }\\n\",\n    \"\\n\",\n    \"    await _stream_msgs(\\n\",\n    \"        msgs=msgs,\\n\",\n    \"        send_stream=send_stream,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    assert mock.call_count == 2\\n\",\n    \"\\n\",\n    \"    mock.assert_has_calls([call(msg) for msg in msgs.values()])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ff3fa870\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check multiple msgs in same topic\\n\",\n    \"\\n\",\n    \"# Two msg, one topic, send called twice for same topic\\n\",\n    \"\\n\",\n    \"with 
patch(\\\"anyio.streams.memory.MemoryObjectSendStream.send\\\") as mock:\\n\",\n    \"    send_stream, receive_stream = anyio.create_memory_object_stream()\\n\",\n    \"\\n\",\n    \"    topic_partitions = [(\\\"topic_0\\\", 0)]\\n\",\n    \"\\n\",\n    \"    msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"    record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"\\n\",\n    \"    msgs = {\\n\",\n    \"        TopicPartition(topic, partition): [\\n\",\n    \"            create_consumer_record(topic=topic, partition=partition, msg=msg),\\n\",\n    \"            create_consumer_record(topic=topic, partition=partition, msg=msg),\\n\",\n    \"        ]\\n\",\n    \"        for topic, partition in topic_partitions\\n\",\n    \"    }\\n\",\n    \"\\n\",\n    \"    await _stream_msgs(\\n\",\n    \"        msgs=msgs,\\n\",\n    \"        send_stream=send_stream,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    mock.assert_has_calls([call(msg) for msg in msgs.values()])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"403988a0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Check multiple partitions\\n\",\n    \"\\n\",\n    \"# Two msg, one topic, different partitions, send called twice for same topic\\n\",\n    \"\\n\",\n    \"with patch(\\\"anyio.streams.memory.MemoryObjectSendStream.send\\\") as mock:\\n\",\n    \"    send_stream, receive_stream = anyio.create_memory_object_stream()\\n\",\n    \"\\n\",\n    \"    topic_partitions = [(\\\"topic_0\\\", 0), (\\\"topic_0\\\", 1)]\\n\",\n    \"\\n\",\n    \"    msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"    msgs = {\\n\",\n    \"        TopicPartition(topic, partition): [\\n\",\n    \"            create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"        ]\\n\",\n    \"        for topic, partition in topic_partitions\\n\",\n    \"    }\\n\",\n   
 \"    record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"\\n\",\n    \"    await _stream_msgs(\\n\",\n    \"        msgs=msgs,\\n\",\n    \"        send_stream=send_stream,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    mock.assert_has_calls([call(msg) for msg in msgs.values()])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e5275963\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_single_msg_handlers(  # type: ignore\\n\",\n    \"    *,\\n\",\n    \"    consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer,\\n\",\n    \"    callback: AsyncConsumeMeta,\\n\",\n    \"    decoder_fn: Callable[[bytes, Type[BaseModel]], Any],\\n\",\n    \"    msg_type: Type[BaseModel],\\n\",\n    \"    **kwargs: Any,\\n\",\n    \") -> Tuple[\\n\",\n    \"    Callable[\\n\",\n    \"        [\\n\",\n    \"            ConsumerRecord,\\n\",\n    \"            AsyncConsumeMeta,\\n\",\n    \"            Callable[[bytes, Type[BaseModel]], Any],\\n\",\n    \"            Type[BaseModel],\\n\",\n    \"        ],\\n\",\n    \"        Awaitable[None],\\n\",\n    \"    ],\\n\",\n    \"    Callable[\\n\",\n    \"        [fastkafka._aiokafka_imports.AIOKafkaConsumer, Any],\\n\",\n    \"        Awaitable[List[ConsumerRecord]],\\n\",\n    \"    ],\\n\",\n    \"]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Retrieves the message handlers for consuming single messages from a Kafka topic.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        consumer: The Kafka consumer instance.\\n\",\n    \"        callback: The callback function to handle the consumed message.\\n\",\n    \"        decoder_fn: The function to decode the consumed message.\\n\",\n    \"        msg_type: The type of the consumed message.\\n\",\n    \"        **kwargs: Additional keyword arguments for the consumer.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    
\"        The handle_msg function and poll_consumer function.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    async def handle_msg(  # type: ignore\\n\",\n    \"        record: ConsumerRecord,\\n\",\n    \"        callback: AsyncConsumeMeta = callback,\\n\",\n    \"        decoder_fn: Callable[[bytes, Type[BaseModel]], Any] = decoder_fn,\\n\",\n    \"        msg_type: Type[BaseModel] = msg_type,\\n\",\n    \"    ) -> None:\\n\",\n    \"        await callback(\\n\",\n    \"            decoder_fn(record.value, msg_type),\\n\",\n    \"            EventMetadata.create_event_metadata(record),\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    async def poll_consumer(  # type: ignore\\n\",\n    \"        consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer = consumer,\\n\",\n    \"        kwargs: Any = kwargs,\\n\",\n    \"    ) -> List[ConsumerRecord]:\\n\",\n    \"        msgs = await consumer.getmany(**kwargs)\\n\",\n    \"        return [msg for msg_group in msgs.values() for msg in msg_group]\\n\",\n    \"\\n\",\n    \"    return handle_msg, poll_consumer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cc43f2f7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"topic_partitions = [(\\\"topic_0\\\", 0), (\\\"topic_0\\\", 1)]\\n\",\n    \"\\n\",\n    \"msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"msgs = {\\n\",\n    \"    TopicPartition(topic, partition): [\\n\",\n    \"        create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"    ]\\n\",\n    \"    for topic, partition in topic_partitions\\n\",\n    \"}\\n\",\n    \"record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"\\n\",\n    \"consumer = AsyncMock()\\n\",\n    \"consumer.getmany.return_value = msgs\\n\",\n    \"\\n\",\n    \"callback = AsyncMock()\\n\",\n    \"decoder_fn = json_decoder\\n\",\n    \"msg_type = MyMessage\\n\",\n    \"\\n\",\n   
 \"handle_msg, poll_consumer = _get_single_msg_handlers(\\n\",\n    \"    consumer=consumer, callback=callback, decoder_fn=decoder_fn, msg_type=msg_type\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"got_msgs = await poll_consumer()\\n\",\n    \"assert len(msgs.values()) == len(got_msgs)\\n\",\n    \"\\n\",\n    \"for msg in got_msgs:\\n\",\n    \"    await handle_msg(msg)\\n\",\n    \"\\n\",\n    \"callback.assert_has_awaits(\\n\",\n    \"    [\\n\",\n    \"        call(\\n\",\n    \"            json_decoder(msg.value, msg_type), EventMetadata.create_event_metadata(msg)\\n\",\n    \"        )\\n\",\n    \"        for msg in got_msgs\\n\",\n    \"    ]\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5ddd1b59\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_batch_msg_handlers(  # type: ignore\\n\",\n    \"    *,\\n\",\n    \"    consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer,\\n\",\n    \"    callback: AsyncConsumeMeta,\\n\",\n    \"    decoder_fn: Callable[[bytes, Type[BaseModel]], Any],\\n\",\n    \"    msg_type: Type[BaseModel],\\n\",\n    \"    **kwargs: Any,\\n\",\n    \") -> Tuple[\\n\",\n    \"    Callable[\\n\",\n    \"        [\\n\",\n    \"            List[ConsumerRecord],\\n\",\n    \"            AsyncConsumeMeta,\\n\",\n    \"            Callable[[bytes, Type[BaseModel]], Any],\\n\",\n    \"            Type[BaseModel],\\n\",\n    \"        ],\\n\",\n    \"        Awaitable[None],\\n\",\n    \"    ],\\n\",\n    \"    Callable[[fastkafka._aiokafka_imports.AIOKafkaConsumer, Any], Awaitable[List[List[ConsumerRecord]]]],\\n\",\n    \"]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Retrieves the message handlers for consuming messages in batches from a Kafka topic.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        consumer: The Kafka consumer instance.\\n\",\n    \"        callback: The callback function to 
handle the consumed messages.\\n\",\n    \"        decoder_fn: The function to decode the consumed messages.\\n\",\n    \"        msg_type: The type of the consumed messages.\\n\",\n    \"        **kwargs: Additional keyword arguments for the consumer.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The handle_msg function and poll_consumer function.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    async def handle_msg(  # type: ignore\\n\",\n    \"        records: List[ConsumerRecord],\\n\",\n    \"        callback: AsyncConsumeMeta = callback,\\n\",\n    \"        decoder_fn: Callable[[bytes, Type[BaseModel]], Any] = decoder_fn,\\n\",\n    \"        msg_type: Type[BaseModel] = msg_type,\\n\",\n    \"    ) -> None:\\n\",\n    \"        await callback(\\n\",\n    \"            [decoder_fn(record.value, msg_type) for record in records],\\n\",\n    \"            [EventMetadata.create_event_metadata(record) for record in records],\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    async def poll_consumer(  # type: ignore\\n\",\n    \"        consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer = consumer, kwargs: Any = kwargs\\n\",\n    \"    ) -> List[List[ConsumerRecord]]:\\n\",\n    \"        msgs = await consumer.getmany(**kwargs)\\n\",\n    \"        return [value for value in msgs.values() if len(value)>0]\\n\",\n    \"\\n\",\n    \"    return handle_msg, poll_consumer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"18367eb4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"topic_partitions = [(\\\"topic_0\\\", 0), (\\\"topic_0\\\", 1)]\\n\",\n    \"\\n\",\n    \"msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"msgs = {\\n\",\n    \"    TopicPartition(topic, partition): [\\n\",\n    \"        create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"    ]\\n\",\n    \"    for topic, partition in topic_partitions\\n\",\n    
\"}\\n\",\n    \"record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"\\n\",\n    \"consumer = AsyncMock()\\n\",\n    \"consumer.getmany.return_value = msgs\\n\",\n    \"\\n\",\n    \"callback = AsyncMock()\\n\",\n    \"decoder_fn = json_decoder\\n\",\n    \"msg_type = MyMessage\\n\",\n    \"\\n\",\n    \"handle_msg, poll_consumer = _get_batch_msg_handlers(\\n\",\n    \"    consumer=consumer, callback=callback, decoder_fn=decoder_fn, msg_type=msg_type\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"got_msgs = await poll_consumer()\\n\",\n    \"assert len(msgs.values()) == len(got_msgs)\\n\",\n    \"\\n\",\n    \"for msgs in got_msgs:\\n\",\n    \"    assert len(msgs) == 1\\n\",\n    \"\\n\",\n    \"for msg in got_msgs:\\n\",\n    \"    await handle_msg(msg)\\n\",\n    \"\\n\",\n    \"callback.assert_has_awaits(\\n\",\n    \"    [\\n\",\n    \"        call(\\n\",\n    \"            [json_decoder(msg_unwrapped.value, msg_type) for msg_unwrapped in msg],\\n\",\n    \"            [EventMetadata.create_event_metadata(msg_unwrapped) for msg_unwrapped in msg],\\n\",\n    \"        )\\n\",\n    \"        for msg in got_msgs\\n\",\n    \"    ]\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"df02ed2b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer.getmany)\\n\",\n    \"async def _aiokafka_consumer_loop(  # type: ignore\\n\",\n    \"    consumer: fastkafka._aiokafka_imports.AIOKafkaConsumer,\\n\",\n    \"    *,\\n\",\n    \"    topic: str,\\n\",\n    \"    decoder_fn: Callable[[bytes, Type[BaseModel]], Any],\\n\",\n    \"    callback: ConsumeCallable,\\n\",\n    \"    max_buffer_size: int = 100_000,\\n\",\n    \"    msg_type: Union[Type[List[BaseModel]], Type[BaseModel]],\\n\",\n    \"    is_shutting_down_f: Callable[[], bool],\\n\",\n    \"    executor: 
Union[str, StreamExecutor, None] = None,\\n\",\n    \"    **kwargs: Any,\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Consumer loop for infinite polling of the AIOKafka consumer for new messages. Calls consumer.getmany()\\n\",\n    \"    and after the consumer returns messages or times out, messages are decoded and streamed to defined callback.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: Topic to subscribe\\n\",\n    \"        decoder_fn: Function to decode the messages consumed from the topic\\n\",\n    \"        callback: Callback function to be called with each decoded message\\n\",\n    \"        timeout_ms: Time to timeout the getmany request by the consumer\\n\",\n    \"        max_buffer_size: Maximum number of unconsumed messages in the callback buffer\\n\",\n    \"        msg_type: Message type used for decoding consumed messages\\n\",\n    \"        is_shutting_down_f: Function for controlling the shutdown of consumer loop\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    prepared_callback = _prepare_callback(callback)\\n\",\n    \"\\n\",\n    \"    if hasattr(msg_type, \\\"__origin__\\\") and msg_type.__origin__ == list:\\n\",\n    \"        handle_msg, poll_consumer = _get_batch_msg_handlers(\\n\",\n    \"            consumer=consumer,\\n\",\n    \"            callback=prepared_callback,\\n\",\n    \"            decoder_fn=decoder_fn,\\n\",\n    \"            msg_type=msg_type.__args__[0],  # type: ignore\\n\",\n    \"            **kwargs,\\n\",\n    \"        )\\n\",\n    \"    else:\\n\",\n    \"        handle_msg, poll_consumer = _get_single_msg_handlers(\\n\",\n    \"            consumer=consumer,\\n\",\n    \"            callback=prepared_callback,\\n\",\n    \"            decoder_fn=decoder_fn,\\n\",\n    \"            msg_type=msg_type,  # type: ignore\\n\",\n    \"            **kwargs,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    await get_executor(executor).run(\\n\",\n    \"  
      is_shutting_down_f=is_shutting_down_f,\\n\",\n    \"        generator=poll_consumer,  # type: ignore\\n\",\n    \"        processor=handle_msg,  # type: ignore\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b4a60f81\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def is_shutting_down_f(mock_func: Mock, num_calls: int = 1) -> Callable[[], bool]:\\n\",\n    \"    def _is_shutting_down_f():\\n\",\n    \"        return mock_func.call_count == num_calls\\n\",\n    \"\\n\",\n    \"    return _is_shutting_down_f\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3020fa4a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from fastkafka._components.task_streaming import SequentialExecutor\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"77397e6a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topic = \\\"topic_0\\\"\\n\",\n    \"partition = 0\\n\",\n    \"msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"\\n\",\n    \"mock_consumer = MagicMock()\\n\",\n    \"msgs = {TopicPartition(topic, 0): [record]}\\n\",\n    \"\\n\",\n    \"f = asyncio.Future()\\n\",\n    \"f.set_result(msgs)\\n\",\n    \"mock_consumer.configure_mock(**{\\\"getmany.return_value\\\": f})\\n\",\n    \"\\n\",\n    \"def f(msg: MyMessage): pass\\n\",\n    \"mock_callback = MagicMock(spec=f)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"for is_async in [True, False]:\\n\",\n    \"    for executor_type in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"        await _aiokafka_consumer_loop(\\n\",\n    \"            
consumer=mock_consumer,\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=json_decoder,\\n\",\n    \"            max_buffer_size=100,\\n\",\n    \"            timeout_ms=10,\\n\",\n    \"            callback=asyncer.asyncify(mock_callback) if is_async else mock_callback,\\n\",\n    \"            msg_type=MyMessage,\\n\",\n    \"            is_shutting_down_f=is_shutting_down_f(mock_consumer.getmany),\\n\",\n    \"            executor_type=executor_type,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert mock_consumer.getmany.call_count == 1\\n\",\n    \"        mock_callback.assert_called_once_with(msg)\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a6854bb7\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"topic = \\\"topic_0\\\"\\n\",\n    \"partition = 0\\n\",\n    \"msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"\\n\",\n    \"mock_consumer = MagicMock()\\n\",\n    \"msgs = {TopicPartition(topic, 0): [record]}\\n\",\n    \"\\n\",\n    \"f = asyncio.Future()\\n\",\n    \"f.set_result(msgs)\\n\",\n    \"mock_consumer.configure_mock(**{\\\"getmany.return_value\\\": f})\\n\",\n    \"\\n\",\n    \"def f(msg: List[MyMessage]): pass\\n\",\n    \"mock_callback = MagicMock(spec=f)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"for is_async in [True, False]:\\n\",\n    \"    for executor_type in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"        await _aiokafka_consumer_loop(\\n\",\n    \"            consumer=mock_consumer,\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=json_decoder,\\n\",\n    \"            max_buffer_size=100,\\n\",\n    \"  
          timeout_ms=10,\\n\",\n    \"            callback=asyncer.asyncify(mock_callback) if is_async else mock_callback,\\n\",\n    \"            msg_type=List[MyMessage],\\n\",\n    \"            is_shutting_down_f=is_shutting_down_f(mock_consumer.getmany),\\n\",\n    \"            executor_type=executor_type,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert mock_consumer.getmany.call_count == 1\\n\",\n    \"        mock_callback.assert_called_once_with([msg])\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"54b9e6fa\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=Exception('')\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Sanity check: exception in callback recovery\\n\",\n    \"# Two msg, one topic, process_f called twice even tough it throws\\n\",\n    \"\\n\",\n    \"for is_async in [True, False]:\\n\",\n    \"    for executor_type in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"        topic = \\\"topic_0\\\"\\n\",\n    \"        partition = 0\\n\",\n    \"        msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    \"        record = create_consumer_record(topic=topic, partition=partition, 
msg=msg)\\n\",\n    \"\\n\",\n    \"        num_msgs = 2\\n\",\n    \"\\n\",\n    \"        mock_consumer = MagicMock()\\n\",\n    \"        msgs = {TopicPartition(topic, 0): [record, record]}\\n\",\n    \"\\n\",\n    \"        f = asyncio.Future()\\n\",\n    \"        f.set_result(msgs)\\n\",\n    \"\\n\",\n    \"        mock_consumer.configure_mock(**{\\\"getmany.return_value\\\": f})\\n\",\n    \"        mock_callback = Mock()\\n\",\n    \"\\n\",\n    \"        exception = Exception(\\\"\\\")\\n\",\n    \"        mock_callback.side_effect = exception\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"        await _aiokafka_consumer_loop(\\n\",\n    \"            consumer=mock_consumer,\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=json_decoder,\\n\",\n    \"            max_buffer_size=100,\\n\",\n    \"            timeout_ms=1,\\n\",\n    \"            callback=asyncer.asyncify(mock_callback) if is_async else mock_callback,\\n\",\n    \"            msg_type=MyMessage,\\n\",\n    \"            is_shutting_down_f=is_shutting_down_f(mock_consumer.getmany, num_calls=1),\\n\",\n    \"            executor_type=executor_type,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert mock_callback.call_count == num_msgs, mock_callback.call_count\\n\",\n    \"        mock_callback.assert_has_calls([call(msg), call(msg)])\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2afe654a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Sanity check: malformed msgs\\n\",\n    \"# One msg of wrong type, two normal msg, one topic, process_f called twice\\n\",\n    \"\\n\",\n    \"topic = \\\"topic_0\\\"\\n\",\n    \"partition = 0\\n\",\n    \"msg = MyMessage(url=\\\"http://www.acme.com\\\", port=22)\\n\",\n    
\"correct_record = create_consumer_record(topic=topic, partition=partition, msg=msg)\\n\",\n    \"faulty_record = create_consumer_record(topic=topic, partition=partition, msg=\\\"Wrong!\\\")\\n\",\n    \"\\n\",\n    \"mock_consumer = MagicMock()\\n\",\n    \"msgs = {TopicPartition(topic, 0): [faulty_record, correct_record, correct_record]}\\n\",\n    \"\\n\",\n    \"mock_consumer.configure_mock(**{\\\"getmany.return_value\\\": f})\\n\",\n    \"mock_callback = Mock()\\n\",\n    \"\\n\",\n    \"exception = Exception(\\\"\\\")\\n\",\n    \"callback.side_effect = exception\\n\",\n    \"\\n\",\n    \"for is_async in [True, False]:\\n\",\n    \"    await _aiokafka_consumer_loop(\\n\",\n    \"        consumer=mock_consumer,\\n\",\n    \"        topic=topic,\\n\",\n    \"        decoder_fn=json_decoder,\\n\",\n    \"        max_buffer_size=100,\\n\",\n    \"        timeout_ms=10,\\n\",\n    \"        callback=asyncer.asyncify(mock_callback) if is_async else mock_callback,\\n\",\n    \"        msg_type=MyMessage,\\n\",\n    \"        is_shutting_down_f=is_shutting_down_f(mock_consumer.getmany),\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    assert mock_consumer.getmany.call_count == 1\\n\",\n    \"    mock_callback.assert_has_calls([call(msg), call(msg)])\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"46031397\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def sanitize_kafka_config(**kwargs: Any) -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"Sanitize Kafka config\\\"\\\"\\\"\\n\",\n    \"    return {k: \\\"*\\\" * len(v) if \\\"pass\\\" in k.lower() else v for k, v in kwargs.items()}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dfa2ec97\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"kwargs = {\\n\",\n    \"    
\\\"bootstrap_servers\\\": \\\"whatever.cloud:9092\\\",\\n\",\n    \"    \\\"auto_offset_reset\\\": \\\"earliest\\\",\\n\",\n    \"    \\\"security_protocol\\\": \\\"SASL_SSL\\\",\\n\",\n    \"    \\\"sasl_mechanism\\\": \\\"PLAIN\\\",\\n\",\n    \"    \\\"sasl_plain_username\\\": \\\"username\\\",\\n\",\n    \"    \\\"sasl_plain_password\\\": \\\"password\\\",\\n\",\n    \"    \\\"ssl_context\\\": \\\"something\\\",\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"assert sanitize_kafka_config(**kwargs)[\\\"sasl_plain_password\\\"] == \\\"********\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ca7ba3a4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer)\\n\",\n    \"@delegates(_aiokafka_consumer_loop, keep=True)\\n\",\n    \"async def aiokafka_consumer_loop(\\n\",\n    \"    topic: str,\\n\",\n    \"    decoder_fn: Callable[[bytes, Type[BaseModel]], Any],\\n\",\n    \"    *,\\n\",\n    \"    timeout_ms: int = 100,\\n\",\n    \"    max_buffer_size: int = 100_000,\\n\",\n    \"    callback: ConsumeCallable,\\n\",\n    \"    msg_type: Union[Type[List[BaseModel]], Type[BaseModel]],\\n\",\n    \"    is_shutting_down_f: Callable[[], bool],\\n\",\n    \"    executor: Union[str, StreamExecutor, None] = None,\\n\",\n    \"    **kwargs: Any,\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"Consumer loop for infinite polling of the AIOKafka consumer for new messages. 
Creates and starts AIOKafkaConsumer\\n\",\n    \"    and runs the _aiokafka_consumer_loop for infinite polling of the consumer for new messages.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: name of the topic to subscribe to\\n\",\n    \"        decoder_fn: Function to decode the messages consumed from the topic\\n\",\n    \"        callback: callback function to be called after decoding and parsing a consumed message\\n\",\n    \"        timeout_ms: Time to timeout the getmany request by the consumer\\n\",\n    \"        max_buffer_size: Maximum number of unconsumed messages in the callback buffer\\n\",\n    \"        msg_type: Type with `parse_json` method used for parsing a decoded message\\n\",\n    \"        is_shutting_down_f: Function for controlling the shutdown of consumer loop\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(f\\\"aiokafka_consumer_loop() starting...\\\")\\n\",\n    \"    try:\\n\",\n    \"        consumer = fastkafka._aiokafka_imports.AIOKafkaConsumer(\\n\",\n    \"            **kwargs,\\n\",\n    \"        )\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"aiokafka_consumer_loop(): Consumer created using the following parameters: {sanitize_kafka_config(**kwargs)}\\\"\\n\",\n    \"        )\\n\",\n    \"        \\n\",\n    \"        await consumer.start()\\n\",\n    \"        logger.info(\\\"aiokafka_consumer_loop(): Consumer started.\\\")\\n\",\n    \"        consumer.subscribe([topic])\\n\",\n    \"        logger.info(\\\"aiokafka_consumer_loop(): Consumer subscribed.\\\")\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            await _aiokafka_consumer_loop(\\n\",\n    \"                consumer=consumer,\\n\",\n    \"                topic=topic,\\n\",\n    \"                decoder_fn=decoder_fn,\\n\",\n    \"                max_buffer_size=max_buffer_size,\\n\",\n    \"                timeout_ms=timeout_ms,\\n\",\n    \"                callback=callback,\\n\",\n    \"                
msg_type=msg_type,\\n\",\n    \"                is_shutting_down_f=is_shutting_down_f,\\n\",\n    \"                executor = executor,\\n\",\n    \"            )\\n\",\n    \"        finally:\\n\",\n    \"            await consumer.stop()\\n\",\n    \"            logger.info(f\\\"aiokafka_consumer_loop(): Consumer stopped.\\\")\\n\",\n    \"            logger.info(f\\\"aiokafka_consumer_loop() finished.\\\")\\n\",\n    \"    except Exception as e:\\n\",\n    \"        logger.error(\\n\",\n    \"            f\\\"aiokafka_consumer_loop(): unexpected exception raised: '{e.__repr__()}'\\\"\\n\",\n    \"        )\\n\",\n    \"        raise e\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dc89d47e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"b0f0bbb3ee124e0c8d59975fd0b37656\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     
\"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: msgs_received=1000\\n\",\n      \"[INFO] __main__: msgs_received=2000\\n\",\n      \"[INFO] __main__: msgs_received=3000\\n\",\n      \"[INFO] __main__: msgs_received=4000\\n\",\n      \"[INFO] __main__: msgs_received=5000\\n\",\n      \"[INFO] __main__: msgs_received=6000\\n\",\n      \"[INFO] __main__: msgs_received=7000\\n\",\n      \"[INFO] __main__: msgs_received=8000\\n\",\n      \"[INFO] __main__: msgs_received=9000\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 228127...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 228127 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 227746...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 227746 terminated.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka 
is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"fb5162a2670f4f93a5c61b5e8d5de0b6\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. 
\\n\",\n      \"[INFO] __main__: msgs_received=1000\\n\",\n      \"[INFO] __main__: msgs_received=2000\\n\",\n      \"[INFO] __main__: msgs_received=3000\\n\",\n      \"[INFO] __main__: msgs_received=4000\\n\",\n      \"[INFO] __main__: msgs_received=5000\\n\",\n      \"[INFO] __main__: msgs_received=6000\\n\",\n      \"[INFO] __main__: msgs_received=7000\\n\",\n      \"[INFO] __main__: msgs_received=8000\\n\",\n      \"[INFO] __main__: msgs_received=9000\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 229358...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 229358 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 228977...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 228977 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"\\n\",\n    \"for executor in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"    topic = \\\"test_topic\\\"\\n\",\n    \"    msgs_sent = 9178\\n\",\n    \"    msgs = [\\n\",\n    \"        MyMessage(url=\\\"http://www.ai.com\\\", port=port).model_dump_json().encode(\\\"utf-8\\\")\\n\",\n    \"        for port in range(msgs_sent)\\n\",\n    \"    ]\\n\",\n    \"    msgs_received = 0\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async def count_msg(msg: MyMessage):\\n\",\n    \"        global msgs_received\\n\",\n    \"        msgs_received = msgs_received + 1\\n\",\n    \"        if msgs_received % 1000 == 0:\\n\",\n    \"            logger.info(f\\\"{msgs_received=}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async with ApacheKafkaBroker(topics=[topic], listener_port=11992) as bootstrap_server:\\n\",\n    \"        await 
produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"        await aiokafka_consumer_loop(\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=json_decoder,\\n\",\n    \"            auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"            callback=count_msg,\\n\",\n    \"            msg_type=MyMessage,\\n\",\n    \"            is_shutting_down_f=true_after(2),\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            executor=executor,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert msgs_sent == msgs_received, f\\\"{msgs_sent} != {msgs_received}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fde91e3e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"cf8d3ff86a5c4328b69a58fa64bf008b\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created 
using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: msgs_received=9178\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 230586...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 230586 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 230206...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 230206 terminated.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"4f2bfe58038548168f841826c32b63ba\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|   
       | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: msgs_received=9178\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 231816...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 231816 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 231435...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 231435 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"for executor in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"    topic = \\\"test_topic\\\"\\n\",\n    \"    msgs_sent = 9178\\n\",\n    \"    msgs = [\\n\",\n    \"        MyMessage(url=\\\"http://www.ai.com\\\", port=port).model_dump_json().encode(\\\"utf-8\\\")\\n\",\n    \"        for port in 
range(msgs_sent)\\n\",\n    \"    ]\\n\",\n    \"    msgs_received = 0\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async def count_msg(msg: List[MyMessage], meta: List[EventMetadata]):\\n\",\n    \"        global msgs_received\\n\",\n    \"        msgs_received = msgs_received + len(msg)\\n\",\n    \"        logger.info(f\\\"{msgs_received=}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async with ApacheKafkaBroker(topics=[topic], listener_port=11992) as bootstrap_server:\\n\",\n    \"        await produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"        await aiokafka_consumer_loop(\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=json_decoder,\\n\",\n    \"            auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"            callback=count_msg,\\n\",\n    \"            msg_type=List[MyMessage],\\n\",\n    \"            is_shutting_down_f=true_after(2),\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            executor=executor,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert msgs_sent == msgs_received, f\\\"{msgs_sent} != {msgs_received}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e47e37ac\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": 
\"3ba8321cf4ef4461b8042a3d79ce3d22\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. 
\\n\",\n      \"[INFO] __main__: msgs_received=1000, meta=EventMetadata(topic='test_topic', partition=0, offset=999, timestamp=1688404081071, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":999}', checksum=None, serialized_key_size=-1, serialized_value_size=39, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=2000, meta=EventMetadata(topic='test_topic', partition=0, offset=1999, timestamp=1688404081080, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":1999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=3000, meta=EventMetadata(topic='test_topic', partition=0, offset=2999, timestamp=1688404081087, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":2999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=4000, meta=EventMetadata(topic='test_topic', partition=0, offset=3999, timestamp=1688404081095, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":3999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=5000, meta=EventMetadata(topic='test_topic', partition=0, offset=4999, timestamp=1688404081102, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":4999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=6000, meta=EventMetadata(topic='test_topic', partition=0, offset=5999, timestamp=1688404081110, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":5999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=7000, meta=EventMetadata(topic='test_topic', partition=0, 
offset=6999, timestamp=1688404081116, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":6999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=8000, meta=EventMetadata(topic='test_topic', partition=0, offset=7999, timestamp=1688404081123, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":7999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=9000, meta=EventMetadata(topic='test_topic', partition=0, offset=8999, timestamp=1688404081130, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":8999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 233044...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 233044 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 232663...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 232663 terminated.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      
\"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"727b139b07e244af90546172944ad4db\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. 
\\n\",\n      \"[INFO] __main__: msgs_received=1000, meta=EventMetadata(topic='test_topic', partition=0, offset=999, timestamp=1688404089840, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":999}', checksum=None, serialized_key_size=-1, serialized_value_size=39, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=2000, meta=EventMetadata(topic='test_topic', partition=0, offset=1999, timestamp=1688404089848, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":1999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=3000, meta=EventMetadata(topic='test_topic', partition=0, offset=2999, timestamp=1688404089854, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":2999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=4000, meta=EventMetadata(topic='test_topic', partition=0, offset=3999, timestamp=1688404089860, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":3999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=5000, meta=EventMetadata(topic='test_topic', partition=0, offset=4999, timestamp=1688404089867, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":4999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=6000, meta=EventMetadata(topic='test_topic', partition=0, offset=5999, timestamp=1688404089873, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":5999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=7000, meta=EventMetadata(topic='test_topic', partition=0, 
offset=6999, timestamp=1688404089879, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":6999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=8000, meta=EventMetadata(topic='test_topic', partition=0, offset=7999, timestamp=1688404089886, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":7999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: msgs_received=9000, meta=EventMetadata(topic='test_topic', partition=0, offset=8999, timestamp=1688404089892, timestamp_type=0, key=None, value=b'{\\\"url\\\":\\\"http://www.ai.com/\\\",\\\"port\\\":8999}', checksum=None, serialized_key_size=-1, serialized_value_size=40, headers=())\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 234272...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 234272 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 233892...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 233892 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test with meta\\n\",\n    \"\\n\",\n    \"for executor in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"    topic = \\\"test_topic\\\"\\n\",\n    \"    msgs_sent = 9178\\n\",\n    \"    msgs = [\\n\",\n    \"        MyMessage(url=\\\"http://www.ai.com\\\", port=port).model_dump_json().encode(\\\"utf-8\\\")\\n\",\n    \"        for port in range(msgs_sent)\\n\",\n    \"    ]\\n\",\n    \"    msgs_received = 0\\n\",\n    \"    meta_samples = []\\n\",\n    
\"\\n\",\n    \"    async def count_msg(msg: MyMessage, meta: EventMetadata):\\n\",\n    \"        global msgs_received\\n\",\n    \"        msgs_received = msgs_received + 1\\n\",\n    \"        if msgs_received % 1000 == 0:\\n\",\n    \"            meta_samples.append(meta)\\n\",\n    \"            logger.info(f\\\"{msgs_received=}, {meta=}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async with ApacheKafkaBroker(topics=[topic], listener_port=11992) as bootstrap_server:\\n\",\n    \"        await produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"        await aiokafka_consumer_loop(\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=json_decoder,\\n\",\n    \"            auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"            callback=count_msg,\\n\",\n    \"            msg_type=MyMessage,\\n\",\n    \"            is_shutting_down_f=true_after(2),\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            executor = executor\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert msgs_sent == msgs_received, f\\\"{msgs_sent} != {msgs_received}\\\"\\n\",\n    \"        assert all(isinstance(meta, EventMetadata) for meta in meta_samples)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d6484e52\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      
\"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"18826d9659c54c669419983850303cc8\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. 
\\n\",\n      \"[INFO] __main__: msgs_received=1000\\n\",\n      \"[INFO] __main__: msgs_received=2000\\n\",\n      \"[INFO] __main__: msgs_received=3000\\n\",\n      \"[INFO] __main__: msgs_received=4000\\n\",\n      \"[INFO] __main__: msgs_received=5000\\n\",\n      \"[INFO] __main__: msgs_received=6000\\n\",\n      \"[INFO] __main__: msgs_received=7000\\n\",\n      \"[INFO] __main__: msgs_received=8000\\n\",\n      \"[INFO] __main__: msgs_received=9000\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 235506...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 235506 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 235124...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 235124 terminated.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"29a73a0af3b54d858a814c1f0d861d3d\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     
\"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: msgs_received=1000\\n\",\n      \"[INFO] __main__: msgs_received=2000\\n\",\n      \"[INFO] __main__: msgs_received=3000\\n\",\n      \"[INFO] __main__: msgs_received=4000\\n\",\n      \"[INFO] __main__: msgs_received=5000\\n\",\n      \"[INFO] __main__: msgs_received=6000\\n\",\n      \"[INFO] __main__: msgs_received=7000\\n\",\n      \"[INFO] __main__: msgs_received=8000\\n\",\n      \"[INFO] __main__: msgs_received=9000\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 236738...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 236738 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 236357...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 236357 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test with avro_decoder\\n\",\n    \"\\n\",\n    \"for executor in [\\\"DynamicTaskExecutor\\\", 
\\\"SequentialExecutor\\\"]:\\n\",\n    \"    topic = \\\"test_topic\\\"\\n\",\n    \"    msgs_sent = 9178\\n\",\n    \"    msgs = [\\n\",\n    \"        avro_encoder(MyMessage(url=\\\"http://www.ai.com\\\", port=port))\\n\",\n    \"        for port in range(msgs_sent)\\n\",\n    \"    ]\\n\",\n    \"    msgs_received = 0\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async def count_msg(msg: MyMessage):\\n\",\n    \"        global msgs_received\\n\",\n    \"        msgs_received = msgs_received + 1\\n\",\n    \"        if msgs_received % 1000 == 0:\\n\",\n    \"            logger.info(f\\\"{msgs_received=}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async with ApacheKafkaBroker(topics=[topic], listener_port=11992) as bootstrap_server:\\n\",\n    \"        await produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"        await aiokafka_consumer_loop(\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=avro_decoder,\\n\",\n    \"            auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"            callback=count_msg,\\n\",\n    \"            msg_type=MyMessage,\\n\",\n    \"            is_shutting_down_f=true_after(2),\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            executor=executor,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert msgs_sent == msgs_received, f\\\"{msgs_sent} != {msgs_received}\\\"\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"94ea86a8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting 
kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"cd276ce4985d4bd3a83130625e28a9cd\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. 
\\n\",\n      \"[INFO] __main__: msgs_received=1000\\n\",\n      \"[INFO] __main__: msgs_received=2000\\n\",\n      \"[INFO] __main__: msgs_received=3000\\n\",\n      \"[INFO] __main__: msgs_received=4000\\n\",\n      \"[INFO] __main__: msgs_received=5000\\n\",\n      \"[INFO] __main__: msgs_received=6000\\n\",\n      \"[INFO] __main__: msgs_received=7000\\n\",\n      \"[INFO] __main__: msgs_received=8000\\n\",\n      \"[INFO] __main__: msgs_received=9000\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 237967...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 237967 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 237585...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 237585 terminated.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"4937733125b945ad9dace85f5b570516\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/9178 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     
\"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: msgs_received=1000\\n\",\n      \"[INFO] __main__: msgs_received=2000\\n\",\n      \"[INFO] __main__: msgs_received=3000\\n\",\n      \"[INFO] __main__: msgs_received=4000\\n\",\n      \"[INFO] __main__: msgs_received=5000\\n\",\n      \"[INFO] __main__: msgs_received=6000\\n\",\n      \"[INFO] __main__: msgs_received=7000\\n\",\n      \"[INFO] __main__: msgs_received=8000\\n\",\n      \"[INFO] __main__: msgs_received=9000\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 239194...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 239194 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 238813...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 238813 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test with avro_decoder and meta\\n\",\n    \"\\n\",\n    \"for executor in [\\\"DynamicTaskExecutor\\\", 
\\\"SequentialExecutor\\\"]:\\n\",\n    \"    topic = \\\"test_topic\\\"\\n\",\n    \"    msgs_sent = 9178\\n\",\n    \"    msgs = [\\n\",\n    \"        avro_encoder(MyMessage(url=\\\"http://www.ai.com\\\", port=port))\\n\",\n    \"        for port in range(msgs_sent)\\n\",\n    \"    ]\\n\",\n    \"    msgs_received = 0\\n\",\n    \"    meta_samples = []\\n\",\n    \"\\n\",\n    \"    async def count_msg(msg: MyMessage, meta: EventMetadata):\\n\",\n    \"        global msgs_received\\n\",\n    \"        msgs_received = msgs_received + 1\\n\",\n    \"        if msgs_received % 1000 == 0:\\n\",\n    \"            logger.info(f\\\"{msgs_received=}\\\")\\n\",\n    \"            meta_samples.append(meta)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async with ApacheKafkaBroker(topics=[topic], listener_port=11992) as bootstrap_server:\\n\",\n    \"        await produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"        await aiokafka_consumer_loop(\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=avro_decoder,\\n\",\n    \"            auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"            callback=count_msg,\\n\",\n    \"            msg_type=MyMessage,\\n\",\n    \"            is_shutting_down_f=true_after(2),\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            executor=executor,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert msgs_sent == msgs_received, f\\\"{msgs_sent} != {msgs_received}\\\"\\n\",\n    \"        assert all(isinstance(meta, EventMetadata) for meta in meta_samples)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3cd9d9d8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] 
fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"02bc2b22db4a4832aafe07b9022a74f4\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"d41ef425a1194d138ca92b02c0a4cad0\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"consuming messages:   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. 
\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"Messages processed: 50,000\\n\",\n      \"Time              : 2.36 s\\n\",\n      \"Throughput.       : 21,212 msg/s\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 240422...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 240422 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 240041...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 240041 terminated.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"fb2981e9f67047bc8df96e629cc0c505\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"de913723000e49adbe63c93ef447f1a7\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"consuming messages:   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": 
\"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"Messages processed: 50,000\\n\",\n      \"Time              : 1.65 s\\n\",\n      \"Throughput.       
: 30,371 msg/s\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 241652...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 241652 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 241271...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 241271 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"for executor in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"    topic = \\\"test_topic\\\"\\n\",\n    \"    msgs_sent = 500_00\\n\",\n    \"    msgs = [\\n\",\n    \"        MyMessage(url=\\\"http://www.ai.com\\\", port=port).model_dump_json().encode(\\\"utf-8\\\")\\n\",\n    \"        for port in range(msgs_sent)\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async def count_msg(msg: MyMessage):\\n\",\n    \"        pbar.update(1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    def _is_shutting_down_f():\\n\",\n    \"        return pbar.n >= pbar.total\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async with ApacheKafkaBroker(topics=[topic], listener_port=11992) as bootstrap_server:\\n\",\n    \"        await produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"        with tqdm(total=msgs_sent, desc=\\\"consuming messages\\\") as _pbar:\\n\",\n    \"            global pbar\\n\",\n    \"            pbar = _pbar\\n\",\n    \"\\n\",\n    \"            start = datetime.now()\\n\",\n    \"            await aiokafka_consumer_loop(\\n\",\n    \"                topic=topic,\\n\",\n    \"                decoder_fn=json_decoder,\\n\",\n    \"                auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"                callback=count_msg,\\n\",\n    \"                msg_type=MyMessage,\\n\",\n    \"                is_shutting_down_f=_is_shutting_down_f,\\n\",\n    \"     
           bootstrap_servers=bootstrap_server,\\n\",\n    \"                executor=executor\\n\",\n    \"            )\\n\",\n    \"            t = (datetime.now() - start) / timedelta(seconds=1)\\n\",\n    \"            thrp = pbar.n / t\\n\",\n    \"\\n\",\n    \"            print(f\\\"Messages processed: {pbar.n:,d}\\\")\\n\",\n    \"            print(f\\\"Time              : {t:.2f} s\\\")\\n\",\n    \"            print(f\\\"Throughput.       : {thrp:,.0f} msg/s\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ff5ada17\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"1dcc60d607ca40d282062e5346709535\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"3daf4e7d24d54342836d35c637035c53\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"consuming messages:   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     
\"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"Messages processed: 50,000\\n\",\n      \"Time              : 9.75 s\\n\",\n      \"Throughput.       
: 5,127 msg/s\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 242885...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 242885 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 242503...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 242503 terminated.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"6180748bfe2c46559df2d40406b90a2a\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"8e232f75af3d44789a93157e5e88b974\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"consuming messages:   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the 
following parameters: {'auto_offset_reset': 'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"Messages processed: 50,000\\n\",\n      \"Time              : 8.43 s\\n\",\n      \"Throughput.       : 5,934 msg/s\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 244120...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 244120 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 243738...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 243738 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test with avro_decoder\\n\",\n    \"\\n\",\n    \"for executor in [\\\"DynamicTaskExecutor\\\", \\\"SequentialExecutor\\\"]:\\n\",\n    \"    topic = \\\"test_topic\\\"\\n\",\n    \"    msgs_sent = 500_00\\n\",\n    \"    msgs = [\\n\",\n    \"        avro_encoder(MyMessage(url=\\\"http://www.ai.com\\\", port=port))\\n\",\n    \"        for port in range(msgs_sent)\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async def count_msg(msg: MyMessage):\\n\",\n    \"        pbar.update(1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    def _is_shutting_down_f():\\n\",\n    \"        return pbar.n >= 
pbar.total\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"    async with ApacheKafkaBroker(topics=[topic], listener_port=11992) as bootstrap_server:\\n\",\n    \"        await produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"        with tqdm(total=msgs_sent, desc=\\\"consuming messages\\\") as _pbar:\\n\",\n    \"            global pbar\\n\",\n    \"            pbar = _pbar\\n\",\n    \"\\n\",\n    \"            start = datetime.now()\\n\",\n    \"            await aiokafka_consumer_loop(\\n\",\n    \"                topic=topic,\\n\",\n    \"                decoder_fn=avro_decoder,\\n\",\n    \"                auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"                callback=count_msg,\\n\",\n    \"                msg_type=MyMessage,\\n\",\n    \"                is_shutting_down_f=_is_shutting_down_f,\\n\",\n    \"                bootstrap_servers=bootstrap_server,\\n\",\n    \"                executor=executor\\n\",\n    \"            )\\n\",\n    \"            t = (datetime.now() - start) / timedelta(seconds=1)\\n\",\n    \"            thrp = pbar.n / t\\n\",\n    \"\\n\",\n    \"            print(f\\\"Messages processed: {pbar.n:,d}\\\")\\n\",\n    \"            print(f\\\"Time              : {t:.2f} s\\\")\\n\",\n    \"            print(f\\\"Throughput.       
: {thrp:,.0f} msg/s\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"ac591139\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Consumer loop benchmark and coroutine sanity check\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"810bb69a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:11992\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"38effce230a74bffa5882effc66009f2\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_topic':   0%|          | 0/50000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"69f36e173de347e88287113ed705b33b\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"consuming messages:   0%|          | 0/100000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 
'earliest', 'bootstrap_servers': '127.0.0.1:11992'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_topic'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'test_topic'}\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_topic': 1}. \\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] __main__: aiokafka_consumer_loop() finished.\\n\",\n      \"Messages processed: 100,000\\n\",\n      \"Time              : 5.07 s\\n\",\n      \"Throughput.       : 19,721 msg/s\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 245352...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 245352 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 244970...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 244970 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"topic = \\\"test_topic\\\"\\n\",\n    \"msgs_sent = 500_00\\n\",\n    \"msgs = [\\n\",\n    \"    MyMessage(url=\\\"http://www.ai.com\\\", port=port).model_dump_json().encode(\\\"utf-8\\\")\\n\",\n    \"    for port in range(msgs_sent)\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def count_msg(msg: MyMessage):\\n\",\n    \"    pbar.update(1)\\n\",\n    \"    await asyncio.sleep(1)\\n\",\n    \"    pbar.update(1)\\n\",\n    \"\\n\",\n    \"def _is_shutting_down_f():\\n\",\n    \"    return pbar.n >= pbar.total\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with ApacheKafkaBroker(topics=[topic], 
listener_port=11992) as bootstrap_server:\\n\",\n    \"    await produce_messages(topic=topic, bootstrap_servers=bootstrap_server, msgs=msgs)\\n\",\n    \"    with tqdm(total=msgs_sent*2, desc=\\\"consuming messages\\\") as _pbar:\\n\",\n    \"        global pbar\\n\",\n    \"        pbar = _pbar\\n\",\n    \"\\n\",\n    \"        start = datetime.now()\\n\",\n    \"        await aiokafka_consumer_loop(\\n\",\n    \"            topic=topic,\\n\",\n    \"            decoder_fn=json_decoder,\\n\",\n    \"            auto_offset_reset=\\\"earliest\\\",\\n\",\n    \"            callback=count_msg,\\n\",\n    \"            msg_type=MyMessage,\\n\",\n    \"            is_shutting_down_f=_is_shutting_down_f,\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            executor = \\\"DynamicTaskExecutor\\\"\\n\",\n    \"        )\\n\",\n    \"        t = (datetime.now() - start) / timedelta(seconds=1)\\n\",\n    \"        thrp = pbar.n / t\\n\",\n    \"\\n\",\n    \"        print(f\\\"Messages processed: {pbar.n:,d}\\\")\\n\",\n    \"        print(f\\\"Time              : {t:.2f} s\\\")\\n\",\n    \"        print(f\\\"Throughput.       : {thrp:,.0f} msg/s\\\")\\n\",\n    \"        \\n\",\n    \"assert t < 15\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d9067015\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/013_ProducerDecorator.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"163ff75c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.producer_decorator\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"563b0aba\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import functools\\n\",\n    \"import logging\\n\",\n    \"import random\\n\",\n    \"import time\\n\",\n    \"from asyncio import iscoroutinefunction  # do not use the version from inspect\\n\",\n    \"from dataclasses import dataclass\\n\",\n    \"from functools import partial\\n\",\n    \"from inspect import Parameter\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from aiokafka import AIOKafkaProducer\\n\",\n    \"from aiokafka.errors import KafkaTimeoutError, RequestTimedOutError\\n\",\n    \"from aiokafka.producer.message_accumulator import BatchBuilder\\n\",\n    \"from pydantic import BaseModel\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import get_logger, cached_log\\n\",\n    \"from fastkafka._components.meta import export\\n\",\n    \"from fastkafka._components.helpers import remove_suffix\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e222a73f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import asyncio\\n\",\n    \"import unittest\\n\",\n    \"from contextlib import asynccontextmanager, contextmanager\\n\",\n    \"from itertools import product\\n\",\n    \"from unittest.mock import ANY, Mock, call\\n\",\n    \"from _pytest import monkeypatch\\n\",\n    \"\\n\",\n    \"from pydantic import Field\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka.encoder import avro_encoder, json_encoder\\n\",\n    \"from 
fastkafka._testing.in_memory_broker import InMemoryBroker, InMemoryProducer, InMemoryConsumer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"436c6e0a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fae4d87e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d4b4e5e4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"BaseSubmodel = TypeVar(\\\"BaseSubmodel\\\", bound=Union[List[BaseModel], BaseModel])\\n\",\n    \"BaseSubmodel\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@dataclass\\n\",\n    \"@export(\\\"fastkafka\\\")\\n\",\n    \"class KafkaEvent(Generic[BaseSubmodel]):\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    A generic class for representing Kafka events. 
Based on BaseSubmodel, bound to pydantic.BaseModel\\n\",\n    \"\\n\",\n    \"    Attributes:\\n\",\n    \"        message (BaseSubmodel): The message contained in the Kafka event, can be of type pydantic.BaseModel.\\n\",\n    \"        key (bytes, optional): The optional key used to identify the Kafka event.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    message: BaseSubmodel\\n\",\n    \"    key: Optional[bytes] = None\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e2e0166e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"event = KafkaEvent(\\\"Some message\\\")\\n\",\n    \"assert event.message == \\\"Some message\\\"\\n\",\n    \"assert event.key == None\\n\",\n    \"\\n\",\n    \"event = KafkaEvent(\\\"Some message\\\", b\\\"123\\\")\\n\",\n    \"assert event.message == \\\"Some message\\\"\\n\",\n    \"assert event.key == b\\\"123\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4e7de730\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def unwrap_from_kafka_event(var_type: Union[Type, Parameter]) -> Union[Type, Parameter]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Unwraps the type from a KafkaEvent.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        var_type: Type to unwrap.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Type: Unwrapped type if the given type is a KafkaEvent, otherwise returns the same type.\\n\",\n    \"\\n\",\n    \"    Example:\\n\",\n    \"        - Input: KafkaEvent[str]\\n\",\n    \"          Output: str\\n\",\n    \"        - Input: int\\n\",\n    \"          Output: int\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if hasattr(var_type, \\\"__origin__\\\") and var_type.__origin__ == KafkaEvent:\\n\",\n    \"        return var_type.__args__[0]  # type: ignore\\n\",\n    \"    else:\\n\",\n    \"        return 
var_type\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7277bcf0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert unwrap_from_kafka_event(KafkaEvent[int]) == int\\n\",\n    \"assert unwrap_from_kafka_event(int) == int\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"21d981cd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"ProduceReturnTypes = Union[\\n\",\n    \"    BaseModel, KafkaEvent[BaseModel], List[BaseModel], KafkaEvent[List[BaseModel]]\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"ProduceCallable = Union[\\n\",\n    \"    Callable[..., ProduceReturnTypes], Callable[..., Awaitable[ProduceReturnTypes]]\\n\",\n    \"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a66cc7a0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# # | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# def _to_json_utf8(o: Any) -> bytes:\\n\",\n    \"#     \\\"\\\"\\\"Converts to JSON and then encodes with UTF-8\\\"\\\"\\\"\\n\",\n    \"#     if hasattr(o, \\\"json\\\"):\\n\",\n    \"#         return o.json().encode(\\\"utf-8\\\")  # type: ignore\\n\",\n    \"#     else:\\n\",\n    \"#         return json.dumps(o).encode(\\\"utf-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4bd6af9b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# assert _to_json_utf8({\\\"a\\\": 1, \\\"b\\\": [2, 3]}) == b'{\\\"a\\\": 1, \\\"b\\\": [2, 3]}'\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class ExampleMsg(BaseModel):\\n\",\n    \"    name: str = Field()\\n\",\n    \"    age: int\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# assert _to_json_utf8(ExampleMsg(name=\\\"Davor\\\", age=12)) == b'{\\\"name\\\": \\\"Davor\\\", \\\"age\\\": 12}'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"4eb9ce04\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _wrap_in_event(\\n\",\n    \"    message: Union[BaseModel, List[BaseModel], KafkaEvent]\\n\",\n    \") -> KafkaEvent:\\n\",\n    \"    return message if type(message) == KafkaEvent else KafkaEvent(message)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"50cab521\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"message = ExampleMsg(name=\\\"Davor\\\", age=12)\\n\",\n    \"wrapped = _wrap_in_event(message)\\n\",\n    \"\\n\",\n    \"assert type(wrapped) == KafkaEvent\\n\",\n    \"assert wrapped.message == message\\n\",\n    \"assert wrapped.key == None\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d82893da\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"message = KafkaEvent(ExampleMsg(name=\\\"Davor\\\", age=12), b\\\"123\\\")\\n\",\n    \"wrapped = _wrap_in_event(message)\\n\",\n    \"\\n\",\n    \"assert type(wrapped) == KafkaEvent\\n\",\n    \"assert wrapped.message == message.message\\n\",\n    \"assert wrapped.key == b\\\"123\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"18abd215\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def release_callback(\\n\",\n    \"    fut: asyncio.Future, topic: str, wrapped_val: KafkaEvent[BaseModel]\\n\",\n    \") -> None:\\n\",\n    \"    if fut.exception() is not None:\\n\",\n    \"        cached_log(\\n\",\n    \"            logger,\\n\",\n    \"            f\\\"release_callback(): Exception {fut.exception()=}, raised when producing {wrapped_val.message=} to {topic=}\\\",\\n\",\n    \"            level=logging.WARNING,\\n\",\n    \"            timeout=1,\\n\",\n    \"            
log_id=\\\"release_callback()\\\"\\n\",\n    \"        )\\n\",\n    \"    pass\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bd83badb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def produce_single(  # type: ignore\\n\",\n    \"    producer: AIOKafkaProducer,\\n\",\n    \"    topic: str,\\n\",\n    \"    encoder_fn: Callable[[BaseModel], bytes],\\n\",\n    \"    wrapped_val: KafkaEvent[BaseModel],\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Sends a single message to the Kafka producer.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        producer (AIOKafkaProducer): The Kafka producer object.\\n\",\n    \"        topic (str): The topic to which the message will be sent.\\n\",\n    \"        encoder_fn (Callable[[BaseModel], bytes]): The encoding function to encode the message.\\n\",\n    \"        wrapped_val (KafkaEvent[BaseModel]): The wrapped Kafka event containing the message.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    while True:\\n\",\n    \"        try:\\n\",\n    \"            fut = await producer.send(\\n\",\n    \"                topic, encoder_fn(wrapped_val.message), key=wrapped_val.key\\n\",\n    \"            )\\n\",\n    \"            fut.add_done_callback(partial(release_callback, topic=topic, wrapped_val=wrapped_val))\\n\",\n    \"            break\\n\",\n    \"        except KafkaTimeoutError as e:\\n\",\n    \"            logger.warning(f\\\"produce_single(): Exception {e=} raised when producing {wrapped_val.message} to {topic=}, sleeping for 1 second and retrying..\\\")\\n\",\n    \"            await asyncio.sleep(1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f7c363dd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class FakeProducer:\\n\",\n    \"    def __init__(self, return_future: asyncio.Future = 
None):\\n\",\n    \"        self.counter = 0\\n\",\n    \"        if return_future is None:\\n\",\n    \"            return_future = asyncio.Future()\\n\",\n    \"            return_future.set_result(\\\"Some result\\\")\\n\",\n    \"\\n\",\n    \"        self.return_future = return_future\\n\",\n    \"\\n\",\n    \"    async def send(self, *args, **kwargs):\\n\",\n    \"        if self.counter < 5:\\n\",\n    \"            self.counter += 1\\n\",\n    \"            raise KafkaTimeoutError()\\n\",\n    \"        else:\\n\",\n    \"            return self.return_future\\n\",\n    \"\\n\",\n    \"    async def send_batch(self, *args, **kwargs):\\n\",\n    \"        return await self.send()\\n\",\n    \"\\n\",\n    \"    def create_batch(self):\\n\",\n    \"        return unittest.mock.MagicMock()\\n\",\n    \"\\n\",\n    \"    def add_done_callback(self, *args):\\n\",\n    \"        return\\n\",\n    \"\\n\",\n    \"    async def partitions_for(self, *args):\\n\",\n    \"        return [\\\"partition_1\\\", \\\"partition_2\\\"]\\n\",\n    \"    \\n\",\n    \"    def start(*args, **kwargs):\\n\",\n    \"        return\\n\",\n    \"    \\n\",\n    \"    def stop(*args, **kwargs):\\n\",\n    \"        return\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c3fb499d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second 
and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"await produce_single(\\n\",\n    \"    FakeProducer(),\\n\",\n    \"    topic=\\\"test_topic\\\",\\n\",\n    \"    encoder_fn=json_encoder,\\n\",\n    \"    wrapped_val=KafkaEvent(message=ExampleMsg(name=\\\"Davor\\\", age=12), key=b\\\"test\\\"),\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ccb950e1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: produce_single(): Exception e=KafkaTimeoutError() raised when producing name='Davor' age=12 to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: release_callback(): Exception fut.exception()=RequestTimedOutError(), raised when producing 
wrapped_val.message=ExampleMsg(name='Davor', age=12) to topic='test_topic'\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"timeout_future = asyncio.Future()\\n\",\n    \"timeout_future.set_exception(RequestTimedOutError())\\n\",\n    \"\\n\",\n    \"await produce_single(\\n\",\n    \"    FakeProducer(return_future=timeout_future),\\n\",\n    \"    topic=\\\"test_topic\\\",\\n\",\n    \"    encoder_fn=json_encoder,\\n\",\n    \"    wrapped_val=KafkaEvent(message=ExampleMsg(name=\\\"Davor\\\", age=12), key=b\\\"test\\\"),\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b6a9221e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    ProducerClass = InMemoryProducer(broker)\\n\",\n    \"    producer = ProducerClass()\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    await produce_single(\\n\",\n    \"        producer,\\n\",\n    \"        topic=\\\"test_topic\\\",\\n\",\n    \"        encoder_fn=json_encoder,\\n\",\n    \"        wrapped_val=KafkaEvent(message=ExampleMsg(name=\\\"Davor\\\", age=12), key=b\\\"test\\\"),\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9e20ee70\",\n   \"metadata\": {},\n   
\"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def send_batch(  # type: ignore\\n\",\n    \"    producer: AIOKafkaProducer, topic: str, batch: BatchBuilder, key: Optional[bytes]\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Sends a batch of messages to the Kafka producer.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        producer (AIOKafkaProducer): The Kafka producer object.\\n\",\n    \"        topic (str): The topic to which the messages will be sent.\\n\",\n    \"        batch (BatchBuilder): The batch builder object containing the messages.\\n\",\n    \"        key (Optional[bytes]): The optional key used to identify the batch of messages.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    partitions = await producer.partitions_for(topic)\\n\",\n    \"    if key == None:\\n\",\n    \"        partition = random.choice(tuple(partitions))  # nosec\\n\",\n    \"    else:\\n\",\n    \"        partition = producer._partition(topic, None, None, None, key, None)\\n\",\n    \"    while True:\\n\",\n    \"        try:\\n\",\n    \"            await producer.send_batch(batch, topic, partition=partition)\\n\",\n    \"            break\\n\",\n    \"        except KafkaTimeoutError as e:\\n\",\n    \"            logger.warning(f\\\"send_batch(): Exception {e} raised when producing {batch} to {topic=}, sleeping for 1 second and retrying..\\\")\\n\",\n    \"            await asyncio.sleep(1)\\n\",\n    \"    \\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def produce_batch(  # type: ignore\\n\",\n    \"    producer: AIOKafkaProducer,\\n\",\n    \"    topic: str,\\n\",\n    \"    encoder_fn: Callable[[BaseModel], bytes],\\n\",\n    \"    wrapped_val: KafkaEvent[List[BaseModel]],\\n\",\n    \") -> ProduceReturnTypes:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Sends a batch of messages to the Kafka producer.\\n\",\n    \"\\n\",\n 
   \"    Args:\\n\",\n    \"        producer (AIOKafkaProducer): The Kafka producer object.\\n\",\n    \"        topic (str): The topic to which the messages will be sent.\\n\",\n    \"        encoder_fn (Callable[[BaseModel], bytes]): The encoding function to encode the messages.\\n\",\n    \"        wrapped_val (KafkaEvent[List[BaseModel]]): The wrapped Kafka event containing the list of messages.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        ProduceReturnTypes: The return value from the decorated function.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    batch = producer.create_batch()\\n\",\n    \"\\n\",\n    \"    for message in wrapped_val.message:\\n\",\n    \"        metadata = batch.append(\\n\",\n    \"            key=wrapped_val.key,\\n\",\n    \"            value=encoder_fn(message),\\n\",\n    \"            timestamp=int(time.time() * 1000),\\n\",\n    \"        )\\n\",\n    \"        if metadata == None:\\n\",\n    \"            # send batch\\n\",\n    \"            await send_batch(producer, topic, batch, wrapped_val.key)\\n\",\n    \"            # create new batch\\n\",\n    \"            batch = producer.create_batch()\\n\",\n    \"            batch.append(\\n\",\n    \"                key=None, value=encoder_fn(message), timestamp=int(time.time() * 1000)\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"    await send_batch(producer, topic, batch, wrapped_val.key)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7725fd2d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[WARNING] __main__: send_batch(): Exception KafkaTimeoutError raised when producing <MagicMock id='140539229227984'> to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: send_batch(): Exception KafkaTimeoutError raised when producing <MagicMock id='140539229227984'> to topic='test_topic', 
sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: send_batch(): Exception KafkaTimeoutError raised when producing <MagicMock id='140539229227984'> to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: send_batch(): Exception KafkaTimeoutError raised when producing <MagicMock id='140539229227984'> to topic='test_topic', sleeping for 1 second and retrying..\\n\",\n      \"[WARNING] __main__: send_batch(): Exception KafkaTimeoutError raised when producing <MagicMock id='140539229227984'> to topic='test_topic', sleeping for 1 second and retrying..\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"msgs = [ExampleMsg(name=\\\"Davor\\\", age=12) for _ in range(500)]\\n\",\n    \"\\n\",\n    \"await produce_batch(\\n\",\n    \"    FakeProducer(),\\n\",\n    \"    topic=\\\"test_topic\\\",\\n\",\n    \"    encoder_fn=json_encoder,\\n\",\n    \"    wrapped_val=KafkaEvent(message=msgs, key=None),\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ee8b2b92\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"msgs = [ExampleMsg(name=\\\"Davor\\\", age=12) for _ in range(500)]\\n\",\n    \"    \\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    ProducerClass = InMemoryProducer(broker)\\n\",\n    \"    producer = 
ProducerClass()\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    await produce_batch(\\n\",\n    \"        producer,\\n\",\n    \"        topic=\\\"test_topic\\\",\\n\",\n    \"        encoder_fn=json_encoder,\\n\",\n    \"        wrapped_val=KafkaEvent(message=msgs, key=b\\\"test\\\"),\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"87f60d23\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def producer_decorator(\\n\",\n    \"    producer_store: Dict[str, Any],\\n\",\n    \"    func: ProduceCallable,\\n\",\n    \"    topic_key: str,\\n\",\n    \"    encoder_fn: Callable[[BaseModel], bytes],\\n\",\n    \") -> ProduceCallable:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decorator for Kafka producer functions.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        producer_store (Dict[str, Any]): Dictionary to store the Kafka producer objects.\\n\",\n    \"        func (ProduceCallable): The function to be decorated.\\n\",\n    \"        topic_key (str): The key used to identify the topic.\\n\",\n    \"        encoder_fn (Callable[[BaseModel], bytes]): The encoding function to encode the messages.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        ProduceCallable: The decorated function.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If the decorated function is synchronous.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    @functools.wraps(func)\\n\",\n    \"    async def _produce_async(\\n\",\n    \"        *args: List[Any],\\n\",\n    \"        topic_key: str = topic_key,\\n\",\n    \"        encoder_fn: Callable[[BaseModel], bytes] = encoder_fn,\\n\",\n    \"        producer_store: Dict[str, Any] = producer_store,\\n\",\n    \"        f: Callable[..., Awaitable[ProduceReturnTypes]] = func,  # type: 
ignore\\n\",\n    \"        **kwargs: Any,\\n\",\n    \"    ) -> ProduceReturnTypes:\\n\",\n    \"        return_val = await f(*args, **kwargs)\\n\",\n    \"        wrapped_val = _wrap_in_event(return_val)\\n\",\n    \"        _, producer, _, _ = producer_store[topic_key]\\n\",\n    \"        topic = remove_suffix(topic_key)\\n\",\n    \"\\n\",\n    \"        if isinstance(wrapped_val.message, list):\\n\",\n    \"            await produce_batch(producer, topic, encoder_fn, wrapped_val)\\n\",\n    \"        else:\\n\",\n    \"            await produce_single(producer, topic, encoder_fn, wrapped_val)\\n\",\n    \"        return return_val\\n\",\n    \"\\n\",\n    \"    if not iscoroutinefunction(func):\\n\",\n    \"        raise ValueError(\\n\",\n    \"            \\\"Synchronous functions are not supported for produce operation\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    return _produce_async\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e76940b8\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class MockMsg(BaseModel):\\n\",\n    \"    name: str = \\\"Micky Mouse\\\"\\n\",\n    \"    id: int = 123\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"mock_msg = MockMsg()\\n\",\n    \"\\n\",\n    \"topic = \\\"test_topic_1\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7d950429\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"async def _f() -> None:\\n\",\n    \"#     print(\\\"Mock called\\\")\\n\",\n    \"    loop = asyncio.get_running_loop()\\n\",\n    \"\\n\",\n    \"    # Create a new Future object.\\n\",\n    \"    return loop.create_future()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@contextmanager\\n\",\n    \"def mock_InMemoryProducer_send() -> Generator[Mock, None, None]:\\n\",\n    \"    \\\"\\\"\\\"Mocks **send** method of **InMemoryProducer**\\\"\\\"\\\"\\n\",\n    \"    with unittest.mock.patch(\\n\",\n    \"     
   \\\"fastkafka._testing.in_memory_broker.InMemoryProducer.send\\\"\\n\",\n    \"    ) as mock:\\n\",\n    \"        mock.return_value = asyncio.create_task(_f())\\n\",\n    \"\\n\",\n    \"        yield mock\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4d66e6b0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@asynccontextmanager\\n\",\n    \"async def mock_producer_send_env() -> AsyncGenerator[\\n\",\n    \"    Tuple[Mock, AIOKafkaProducer], None\\n\",\n    \"]:\\n\",\n    \"    try:\\n\",\n    \"        with mock_InMemoryProducer_send() as send_mock:\\n\",\n    \"            with InMemoryBroker() as broker:\\n\",\n    \"                ProducerClass = InMemoryProducer(broker)\\n\",\n    \"                producer = ProducerClass()\\n\",\n    \"                await producer.start()\\n\",\n    \"\\n\",\n    \"                yield send_mock, producer\\n\",\n    \"    finally:\\n\",\n    \"        await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"60e06f18\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@asynccontextmanager\\n\",\n    \"async def mock_producer_batch_env() -> AsyncGenerator[\\n\",\n    \"    Tuple[Mock, AIOKafkaProducer], None\\n\",\n    \"]:\\n\",\n    \"    try:\\n\",\n    \"        with unittest.mock.patch(\\n\",\n    \"            \\\"fastkafka._testing.in_memory_broker.InMemoryProducer.send_batch\\\"\\n\",\n    \"        ) as send_batch_mock, unittest.mock.patch(\\n\",\n    \"            \\\"fastkafka._testing.in_memory_broker.InMemoryProducer.create_batch\\\"\\n\",\n    \"        ) as create_batch_mock:\\n\",\n    \"            batch_mock = Mock()\\n\",\n    \"            create_batch_mock.return_value = batch_mock\\n\",\n    \"            send_batch_mock.return_value = asyncio.create_task(_f())\\n\",\n    \"            with InMemoryBroker() as broker:\\n\",\n    \"                
ProducerClass = InMemoryProducer(broker)\\n\",\n    \"                producer = ProducerClass()\\n\",\n    \"                await producer.start()\\n\",\n    \"\\n\",\n    \"                yield batch_mock, send_batch_mock, producer\\n\",\n    \"    finally:\\n\",\n    \"        await producer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4384ad73\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Testing with: is_sync=False , encoder_fn=<function json_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"Testing with: is_sync=False , encoder_fn=<function avro_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async def func_async(mock_msg: MockMsg) -> MockMsg:\\n\",\n    \"    return mock_msg\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def func_sync(mock_msg: MockMsg) -> MockMsg:\\n\",\n    \"    return mock_msg\\n\",\n    \"\\n\",\n    \"\\n\",\n  
  \"is_sync = False\\n\",\n    \"for encoder_fn in [json_encoder, avro_encoder]:\\n\",\n    \"    print(f\\\"Testing with: {is_sync=} , {encoder_fn=}\\\")\\n\",\n    \"    async with mock_producer_send_env() as (send_mock, producer):\\n\",\n    \"        test_func = producer_decorator(\\n\",\n    \"            {topic: (None, producer, None, None)},\\n\",\n    \"            func_sync if is_sync else func_async,\\n\",\n    \"            topic,\\n\",\n    \"            encoder_fn=encoder_fn,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert iscoroutinefunction(test_func) != is_sync\\n\",\n    \"\\n\",\n    \"        value = test_func(mock_msg) if is_sync else await test_func(mock_msg)\\n\",\n    \"\\n\",\n    \"        send_mock.assert_called_once_with(remove_suffix(topic), encoder_fn(mock_msg), key=None)\\n\",\n    \"\\n\",\n    \"        assert value == mock_msg\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9bec5c19\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Testing with: is_sync=False , encoder_fn=<function json_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"Testing with: is_sync=False , encoder_fn=<function avro_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker 
starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"test_key = b\\\"key\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def func_async(mock_msg: MockMsg) -> KafkaEvent[MockMsg]:\\n\",\n    \"    return KafkaEvent(mock_msg, test_key)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def func_sync(mock_msg: MockMsg) -> KafkaEvent[MockMsg]:\\n\",\n    \"    return KafkaEvent(mock_msg, test_key)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"is_sync = False\\n\",\n    \"for encoder_fn in [json_encoder, avro_encoder]:\\n\",\n    \"    print(f\\\"Testing with: {is_sync=} , {encoder_fn=}\\\")\\n\",\n    \"    async with mock_producer_send_env() as (send_mock, producer):\\n\",\n    \"        test_func = producer_decorator(\\n\",\n    \"            {topic: (None, producer, None, None)},\\n\",\n    \"            func_sync if is_sync else func_async,\\n\",\n    \"            topic,\\n\",\n    \"            encoder_fn=encoder_fn,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert iscoroutinefunction(test_func) != is_sync\\n\",\n    \"\\n\",\n    \"        value = test_func(mock_msg) if is_sync else await test_func(mock_msg)\\n\",\n    \"\\n\",\n    \"        send_mock.assert_called_once_with(remove_suffix(topic), encoder_fn(mock_msg), key=test_key)\\n\",\n    \"\\n\",\n    \"        assert value == KafkaEvent(mock_msg, test_key)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8df14a18\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Testing with: is_sync=False , encoder_fn=<function json_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"Testing with: is_sync=False , encoder_fn=<function avro_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"batch_size = 123\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def func_async(mock_msg: MockMsg) -> List[MockMsg]:\\n\",\n    \"    return [mock_msg] * batch_size\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def func_sync(mock_msg: MockMsg) -> List[MockMsg]:\\n\",\n    \"    return [mock_msg] * batch_size\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"is_sync = False\\n\",\n    \"for encoder_fn in [json_encoder, avro_encoder]:\\n\",\n    \"    print(f\\\"Testing with: {is_sync=} , {encoder_fn=}\\\")\\n\",\n    \"    async with mock_producer_batch_env() as (\\n\",\n    \"        batch_mock,\\n\",\n    \"        send_batch_mock,\\n\",\n    \"        producer,\\n\",\n    \"    ):\\n\",\n    \"        test_func = producer_decorator(\\n\",\n    \"            {topic: (None, producer, None, None)},\\n\",\n    \"            func_sync if is_sync else func_async,\\n\",\n    \"            topic,\\n\",\n    \"           
 encoder_fn=encoder_fn,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert iscoroutinefunction(test_func) != is_sync\\n\",\n    \"\\n\",\n    \"        value = test_func(mock_msg) if is_sync else await test_func(mock_msg)\\n\",\n    \"\\n\",\n    \"        batch_mock.append.assert_has_calls(\\n\",\n    \"            [call(key=None, value=encoder_fn(mock_msg), timestamp=ANY)] * batch_size\\n\",\n    \"        )\\n\",\n    \"        send_batch_mock.assert_called_once_with(batch_mock, remove_suffix(topic), partition=0)\\n\",\n    \"\\n\",\n    \"        assert value == [mock_msg] * batch_size\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"beb344fb\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Testing with: is_sync=False , encoder_fn=<function json_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"Testing with: is_sync=False , encoder_fn=<function avro_encoder>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched 
stop() called\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"batch_size = 123\\n\",\n    \"test_key = b\\\"key\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def func_async(mock_msg: MockMsg) -> KafkaEvent[List[MockMsg]]:\\n\",\n    \"    return KafkaEvent([mock_msg] * batch_size, test_key)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def func_sync(mock_msg: MockMsg) -> KafkaEvent[List[MockMsg]]:\\n\",\n    \"    return KafkaEvent([mock_msg] * batch_size, test_key)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"is_sync = False\\n\",\n    \"for encoder_fn in [json_encoder, avro_encoder]:\\n\",\n    \"    print(f\\\"Testing with: {is_sync=} , {encoder_fn=}\\\")\\n\",\n    \"    async with mock_producer_batch_env() as (batch_mock, send_batch_mock, producer):\\n\",\n    \"        test_func = producer_decorator(\\n\",\n    \"            {topic: (None, producer, None, None)},\\n\",\n    \"            func_sync if is_sync else func_async,\\n\",\n    \"            topic,\\n\",\n    \"            encoder_fn=encoder_fn,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        assert iscoroutinefunction(test_func) != is_sync\\n\",\n    \"\\n\",\n    \"        value = test_func(mock_msg) if is_sync else await test_func(mock_msg)\\n\",\n    \"\\n\",\n    \"        batch_mock.append.assert_has_calls(\\n\",\n    \"            [call(key=test_key, value=encoder_fn(mock_msg), timestamp=ANY)] * batch_size\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        send_batch_mock.assert_called_once_with(batch_mock, remove_suffix(topic), partition=0)\\n\",\n    \"\\n\",\n    \"        assert value == KafkaEvent([mock_msg] * batch_size, test_key)\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/014_AsyncAPI.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ff734a78\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.asyncapi\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e3c8cd28\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import json\\n\",\n    \"import platform\\n\",\n    \"import shutil\\n\",\n    \"import subprocess  # nosec: B404: Consider possible security implications associated with the subprocess module.\\n\",\n    \"import tempfile\\n\",\n    \"from datetime import timedelta\\n\",\n    \"from enum import Enum\\n\",\n    \"from pathlib import Path\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from pydantic import ConfigDict, BaseModel, Field, HttpUrl, model_serializer\\n\",\n    \"from pydantic.type_adapter import TypeAdapter\\n\",\n    \"\\n\",\n    \"from fastkafka._components.aiokafka_consumer_loop import ConsumeCallable\\n\",\n    \"from fastkafka._components.docs_dependencies import _check_npm_with_local\\n\",\n    \"from fastkafka._components.helpers import unwrap_list_type\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.producer_decorator import (\\n\",\n    \"    ProduceCallable,\\n\",\n    \"    unwrap_from_kafka_event,\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"16ea81bb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from datetime import datetime\\n\",\n    \"\\n\",\n    \"import pytest\\n\",\n    \"from pydantic import EmailStr\\n\",\n    \"from rich.pretty import pprint\\n\",\n    \"\\n\",\n    \"from fastkafka._components.aiokafka_consumer_loop import EventMetadata\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from 
fastkafka._components.producer_decorator import KafkaEvent\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b33a28e5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"41eb7517\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1a1c3419\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class KafkaMessage(BaseModel):\\n\",\n    \"    # This following config is used to properly format timedelta in ISO 8601 format\\n\",\n    \"    model_config = ConfigDict(ser_json_timedelta=\\\"iso8601\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bb0c4051\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">MyMsg</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">dt</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">datetime</span><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: 
bold\\\">.datetime</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">2021</span>, <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">12</span>, <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">31</span>, <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">23</span>, <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">59</span>, <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">58</span><span style=\\\"font-weight: bold\\\">)</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">td</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">datetime</span><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">.timedelta</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">days</span>=<span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">1</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">seconds</span>=<span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">43321</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">microseconds</span>=<span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">234568</span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mMyMsg\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   
\\u001b[0m\\u001b[33mdt\\u001b[0m=\\u001b[1;35mdatetime\\u001b[0m\\u001b[1;35m.datetime\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[1;36m2021\\u001b[0m, \\u001b[1;36m12\\u001b[0m, \\u001b[1;36m31\\u001b[0m, \\u001b[1;36m23\\u001b[0m, \\u001b[1;36m59\\u001b[0m, \\u001b[1;36m58\\u001b[0m\\u001b[1m)\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mtd\\u001b[0m=\\u001b[1;35mdatetime\\u001b[0m\\u001b[1;35m.timedelta\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[33mdays\\u001b[0m=\\u001b[1;36m1\\u001b[0m, \\u001b[33mseconds\\u001b[0m=\\u001b[1;36m43321\\u001b[0m, \\u001b[33mmicroseconds\\u001b[0m=\\u001b[1;36m234568\\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class MyMsg(KafkaMessage):\\n\",\n    \"    dt: datetime = Field(..., example=datetime.now())\\n\",\n    \"    td: timedelta = Field(timedelta(days=1, hours=12, minutes=2, seconds=1.2345678))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"my_msg = MyMsg(dt=datetime(year=2021, month=12, day=31, hour=23, minute=59, second=58))\\n\",\n    \"pprint(my_msg)\\n\",\n    \"expected = '{\\\"dt\\\":\\\"2021-12-31T23:59:58\\\",\\\"td\\\":\\\"P1DT43321.234568S\\\"}'\\n\",\n    \"actual = my_msg.model_dump_json()\\n\",\n    \"assert actual == expected, f\\\"{actual} != {expected}\\\"\\n\",\n    \"\\n\",\n    \"actual = MyMsg.model_validate_json(actual)\\n\",\n    \"assert actual == my_msg\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d622ee56\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class SecurityType(str, Enum):\\n\",\n    \"    plain = \\\"plain\\\"\\n\",\n    \"    userPassword = \\\"userPassword\\\"\\n\",\n    \"    apiKey = \\\"apiKey\\\"\\n\",\n    \"    X509 = \\\"X509\\\"\\n\",\n    \"    
symmetricEncryption = \\\"symmetricEncryption\\\"\\n\",\n    \"    asymmetricEncryption = \\\"asymmetricEncryption\\\"\\n\",\n    \"    httpApiKey = \\\"httpApiKey\\\"\\n\",\n    \"    http = \\\"http\\\"\\n\",\n    \"    oauth2 = \\\"oauth2\\\"\\n\",\n    \"    openIdConnect = \\\"openIdConnect\\\"\\n\",\n    \"    scramSha256 = \\\"scramSha256\\\"\\n\",\n    \"    scramSha512 = \\\"scramSha512\\\"\\n\",\n    \"    gssapi = \\\"gssapi\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class APIKeyLocation(str, Enum):\\n\",\n    \"    user = \\\"user\\\"\\n\",\n    \"    password = \\\"password\\\"  # nosec\\n\",\n    \"    query = \\\"query\\\"\\n\",\n    \"    header = \\\"header\\\"\\n\",\n    \"    cookie = \\\"cookie\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"sec_scheme_name_mapping = {\\\"security_type\\\": \\\"type\\\", \\\"api_key_loc\\\": \\\"in\\\"}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class SecuritySchema(BaseModel):\\n\",\n    \"    security_type: SecurityType = Field(..., example=\\\"plain\\\")\\n\",\n    \"    description: Optional[str] = Field(None, example=\\\"My security scheme\\\")\\n\",\n    \"    name: Optional[str] = Field(None, example=\\\"my_secret_scheme\\\")\\n\",\n    \"    api_key_loc: Optional[APIKeyLocation] = Field(None, example=\\\"user\\\")\\n\",\n    \"    scheme: Optional[str] = None\\n\",\n    \"    bearerFormat: Optional[str] = None\\n\",\n    \"    flows: Optional[str] = None\\n\",\n    \"    openIdConnectUrl: Optional[str] = None\\n\",\n    \"\\n\",\n    \"    def __init__(self, **kwargs: Any):\\n\",\n    \"        for k, v in sec_scheme_name_mapping.items():\\n\",\n    \"            if v in kwargs:\\n\",\n    \"                kwargs[k] = kwargs.pop(v)\\n\",\n    \"        super().__init__(**kwargs)\\n\",\n    \"\\n\",\n    \"    def model_dump(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:\\n\",\n    \"        \\\"\\\"\\\"Renames internal names of members ('security_type' -> 'type', 'api_key_loc' -> 
'in')\\\"\\\"\\\"\\n\",\n    \"        d = super().model_dump(*args, **kwargs)\\n\",\n    \"\\n\",\n    \"        for k, v in sec_scheme_name_mapping.items():\\n\",\n    \"            d[v] = d.pop(k)\\n\",\n    \"\\n\",\n    \"        # removes None values\\n\",\n    \"        d = {k: v for k, v in d.items() if v is not None}\\n\",\n    \"\\n\",\n    \"        return d\\n\",\n    \"\\n\",\n    \"    def model_dump_json(self, *args: Any, **kwargs: Any) -> str:\\n\",\n    \"        \\\"\\\"\\\"Serialize into JSON using model_dump()\\\"\\\"\\\"\\n\",\n    \"        return json.dumps(self.model_dump(), *args, **kwargs)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"96810c7c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">SecuritySchema</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security_type</span>=<span style=\\\"font-weight: bold\\\">&lt;</span><span style=\\\"color: #ff00ff; text-decoration-color: #ff00ff; font-weight: bold\\\">SecurityType.plain:</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\"> </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: bold\\\">&gt;</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; 
text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">api_key_loc</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">scheme</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">bearerFormat</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">flows</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">openIdConnectUrl</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mSecuritySchema\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33msecurity_type\\u001b[0m=\\u001b[1m<\\u001b[0m\\u001b[1;95mSecurityType.plain:\\u001b[0m\\u001b[39m 
\\u001b[0m\\u001b[32m'plain'\\u001b[0m\\u001b[1m>\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mapi_key_loc\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mscheme\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mbearerFormat\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mflows\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mopenIdConnectUrl\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"JSON={\\\"type\\\": \\\"plain\\\"}\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">SecuritySchema</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security_type</span>=<span style=\\\"font-weight: bold\\\">&lt;</span><span style=\\\"color: #ff00ff; text-decoration-color: #ff00ff; font-weight: bold\\\">SecurityType.plain:</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\"> </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: bold\\\">&gt;</span>,\\n\",\n       \"<span 
style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">api_key_loc</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">scheme</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">bearerFormat</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">flows</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">openIdConnectUrl</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n     
  \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mSecuritySchema\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33msecurity_type\\u001b[0m=\\u001b[1m<\\u001b[0m\\u001b[1;95mSecurityType.plain:\\u001b[0m\\u001b[39m \\u001b[0m\\u001b[32m'plain'\\u001b[0m\\u001b[1m>\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mapi_key_loc\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mscheme\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mbearerFormat\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mflows\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mopenIdConnectUrl\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"sec_schema = SecuritySchema(type=\\\"plain\\\")\\n\",\n    \"pprint(sec_schema)\\n\",\n    \"\\n\",\n    \"actual = sec_schema.model_dump_json()\\n\",\n    \"print(f\\\"JSON={actual}\\\")\\n\",\n    \"assert actual == '{\\\"type\\\": \\\"plain\\\"}', actual\\n\",\n    \"\\n\",\n    \"actual = SecuritySchema.model_validate_json(sec_schema.model_dump_json())\\n\",\n    \"pprint(actual)\\n\",\n    \"assert actual == sec_schema\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0f9fe7e7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class KafkaBroker(BaseModel):\\n\",\n    \"    \\\"\\\"\\\"Kafka 
broker\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    url: str = Field(..., example=\\\"localhost\\\")\\n\",\n    \"    description: str = Field(\\\"Kafka broker\\\")\\n\",\n    \"    port: Union[str, int] = Field(\\\"9092\\\")\\n\",\n    \"    protocol: str = Field(\\\"kafka\\\")\\n\",\n    \"    security: Optional[SecuritySchema] = None\\n\",\n    \"\\n\",\n    \"    def model_dump(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:\\n\",\n    \"        \\\"\\\"\\\"Makes port a variable and remove it from the dictionary\\\"\\\"\\\"\\n\",\n    \"        d = super().model_dump(*args, **kwargs)\\n\",\n    \"        if self.security:\\n\",\n    \"            d[\\\"security\\\"] = self.security.model_dump(*args, **kwargs)\\n\",\n    \"        d[\\\"variables\\\"] = {\\\"port\\\": {\\\"default\\\": str(self.port)}}\\n\",\n    \"        d.pop(\\\"port\\\")\\n\",\n    \"\\n\",\n    \"        d = {k: v for k, v in d.items() if v is not None}\\n\",\n    \"\\n\",\n    \"        return d\\n\",\n    \"\\n\",\n    \"    def model_dump_json(self, *args: Any, **kwargs: Any) -> str:\\n\",\n    \"        \\\"\\\"\\\"Serialize into JSON using dict()\\\"\\\"\\\"\\n\",\n    \"        return json.dumps(self.model_dump(), *args, **kwargs)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2d6f50fd\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: 
#008000; text-decoration-color: #008000\\\">'Kafka broker'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m, \\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m, \\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m, \\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m, \\u001b[33msecurity\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"kafka_broker = KafkaBroker(url=\\\"kafka\\\")\\n\",\n    \"pprint(kafka_broker)\\n\",\n    \"\\n\",\n    \"expected = '{\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}'\\n\",\n    \"print(kafka_broker.model_dump_json())\\n\",\n    \"assert kafka_broker.model_dump_json() == expected\\n\",\n    \"\\n\",\n    \"# serialization/deserialization 
test\\n\",\n    \"actual = KafkaBroker.model_validate_json(kafka_broker.model_dump_json())\\n\",\n    \"assert actual == kafka_broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b635ef47\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka-secure'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">SecuritySchema</span><span style=\\\"font-weight: 
bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security_type</span>=<span style=\\\"font-weight: bold\\\">&lt;</span><span style=\\\"color: #ff00ff; text-decoration-color: #ff00ff; font-weight: bold\\\">SecurityType.plain:</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\"> </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: bold\\\">&gt;</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">api_key_loc</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">scheme</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">bearerFormat</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       
\"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">flows</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">openIdConnectUrl</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka-secure'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33msecurity\\u001b[0m=\\u001b[1;35mSecuritySchema\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33msecurity_type\\u001b[0m=\\u001b[1m<\\u001b[0m\\u001b[1;95mSecurityType.plain:\\u001b[0m\\u001b[39m \\u001b[0m\\u001b[32m'plain'\\u001b[0m\\u001b[1m>\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33mapi_key_loc\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       
\"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33mscheme\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33mbearerFormat\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33mflows\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[33mopenIdConnectUrl\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"JSON={\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka-secure\\\", \\\"security\\\": {\\\"type\\\": \\\"plain\\\"}, \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"sec_kafka_broker = KafkaBroker(\\n\",\n    \"    url=\\\"kafka\\\", protocol=\\\"kafka-secure\\\", security=SecuritySchema(type=\\\"plain\\\")\\n\",\n    \")\\n\",\n    \"pprint(sec_kafka_broker)\\n\",\n    \"\\n\",\n    \"expected = '{\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka-secure\\\", \\\"security\\\": {\\\"type\\\": \\\"plain\\\"}, \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}'\\n\",\n    \"actual = sec_kafka_broker.model_dump_json()\\n\",\n    \"print(f\\\"JSON={actual}\\\")\\n\",\n    \"assert actual == expected\\n\",\n    \"\\n\",\n    \"# serialization/deserialization test\\n\",\n    \"actual = KafkaBroker.model_validate_json(sec_kafka_broker.model_dump_json())\\n\",\n    \"assert actual == sec_kafka_broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b9375b58\",\n   \"metadata\": {},\n   \"outputs\": [],\n   
\"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class ContactInfo(BaseModel):\\n\",\n    \"    name: str = Field(..., example=\\\"My company\\\")\\n\",\n    \"    url: HttpUrl = Field(..., example=\\\"https://www.github.com/mycompany\\\")\\n\",\n    \"    email: str = Field(..., example=\\\"noreply@mycompany.com\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class KafkaServiceInfo(BaseModel):\\n\",\n    \"    title: str = Field(\\\"Title\\\")\\n\",\n    \"    version: str = Field(\\\"0.0.1\\\")\\n\",\n    \"    description: str = Field(\\\"Description of the service\\\")\\n\",\n    \"    contact: ContactInfo = Field(\\n\",\n    \"        ...,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3feb80d0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaServiceInfo</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">title</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Title'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">version</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'0.0.1'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Description of the 
service'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">contact</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">ContactInfo</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'ACME'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">Url</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://www.acme.com/'</span><span style=\\\"font-weight: bold\\\">)</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">email</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'noreply@acme.com'</span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mKafkaServiceInfo\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mtitle\\u001b[0m=\\u001b[32m'Title'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mversion\\u001b[0m=\\u001b[32m'0.0.1'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[32m'Description of the service'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mcontact\\u001b[0m=\\u001b[1;35mContactInfo\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[32m'ACME'\\u001b[0m, \\u001b[33murl\\u001b[0m=\\u001b[1;35mUrl\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[32m'https://www.acme.com/'\\u001b[0m\\u001b[1m)\\u001b[0m, 
\\u001b[33memail\\u001b[0m=\\u001b[32m'noreply@acme.com'\\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"my_contact = ContactInfo(\\n\",\n    \"    name=\\\"ACME\\\", url=\\\"https://www.acme.com\\\", email=\\\"noreply@acme.com\\\"\\n\",\n    \")\\n\",\n    \"service_info = KafkaServiceInfo(contact=my_contact)\\n\",\n    \"pprint(service_info)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e19f636c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class KafkaBrokers(BaseModel):\\n\",\n    \"    brokers: Dict[str, Union[List[KafkaBroker], KafkaBroker]]\\n\",\n    \"\\n\",\n    \"    def model_dump(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:\\n\",\n    \"        \\\"\\\"\\\"Transcribe brokers into bootstrap server groups\\\"\\\"\\\"\\n\",\n    \"        d = super().model_dump(*args, **kwargs)\\n\",\n    \"\\n\",\n    \"        brokers = {}\\n\",\n    \"        for k, v in self.brokers.items():\\n\",\n    \"            if isinstance(v, list):\\n\",\n    \"                brokers.update(\\n\",\n    \"                    {f\\\"{k}-bootstrap-server-{i}\\\": u_v.model_dump() for i, u_v in enumerate(v)}\\n\",\n    \"                )\\n\",\n    \"            else:\\n\",\n    \"                brokers.update({f\\\"{k}\\\": v.model_dump()})\\n\",\n    \"        d[\\\"brokers\\\"] = brokers\\n\",\n    \"        d = {k: v for k, v in d.items() if v is not None}\\n\",\n    \"\\n\",\n    \"        return d\\n\",\n    \"\\n\",\n    \"    def model_dump_json(self, *args: Any, **kwargs: Any) -> str:\\n\",\n    \"        \\\"\\\"\\\"Serialize into JSON using dict()\\\"\\\"\\\"\\n\",\n    \"        return json.dumps(self.model_dump(), *args, **kwargs)\"\n   ]\n  },\n  
{\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c08cc142\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBrokers</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">brokers</span>=<span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'dev'</span>: <span style=\\\"font-weight: bold\\\">[</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; 
font-style: italic\\\">None</span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging'</span>: <span style=\\\"font-weight: bold\\\">[</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka-secure'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span 
style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">SecuritySchema</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security_type</span>=<span style=\\\"font-weight: bold\\\">&lt;</span><span style=\\\"color: #ff00ff; text-decoration-color: #ff00ff; font-weight: bold\\\">SecurityType.plain:</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\"> </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: bold\\\">&gt;</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">api_key_loc</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">scheme</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   
</span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">bearerFormat</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">flows</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">openIdConnectUrl</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">]</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mKafkaBrokers\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mbrokers\\u001b[0m=\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'dev'\\u001b[0m: \\u001b[1m[\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m, \\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m, 
\\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m, \\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m, \\u001b[33msecurity\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'staging'\\u001b[0m: \\u001b[1m[\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka-secure'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33msecurity\\u001b[0m=\\u001b[1;35mSecuritySchema\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[33msecurity_type\\u001b[0m=\\u001b[1m<\\u001b[0m\\u001b[1;95mSecurityType.plain:\\u001b[0m\\u001b[39m \\u001b[0m\\u001b[32m'plain'\\u001b[0m\\u001b[1m>\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[33mapi_key_loc\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[33mscheme\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[33mbearerFormat\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   
\\u001b[0m\\u001b[33mflows\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[33mopenIdConnectUrl\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m]\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"JSON={\\\"brokers\\\": {\\\"dev-bootstrap-server-0\\\": {\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}, \\\"staging-bootstrap-server-0\\\": {\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka-secure\\\", \\\"security\\\": {\\\"type\\\": \\\"plain\\\"}, \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}}}\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBrokers</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">brokers</span>=<span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'dev-bootstrap-server-0'</span>: 
<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">)</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging-bootstrap-server-0'</span>: <span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: 
bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka-secure'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">SecuritySchema</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security_type</span>=<span style=\\\"font-weight: bold\\\">&lt;</span><span style=\\\"color: #ff00ff; text-decoration-color: #ff00ff; font-weight: bold\\\">SecurityType.plain:</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\"> </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: 
bold\\\">&gt;</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">api_key_loc</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">scheme</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">bearerFormat</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">flows</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">openIdConnectUrl</span>=<span style=\\\"color: #800080; 
text-decoration-color: #800080; font-style: italic\\\">None</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mKafkaBrokers\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mbrokers\\u001b[0m=\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'dev-bootstrap-server-0'\\u001b[0m: \\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33msecurity\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m)\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'staging-bootstrap-server-0'\\u001b[0m: \\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   
\\u001b[0m\\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka-secure'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33msecurity\\u001b[0m=\\u001b[1;35mSecuritySchema\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33msecurity_type\\u001b[0m=\\u001b[1m<\\u001b[0m\\u001b[1;95mSecurityType.plain:\\u001b[0m\\u001b[39m \\u001b[0m\\u001b[32m'plain'\\u001b[0m\\u001b[1m>\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mapi_key_loc\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mscheme\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mbearerFormat\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mflows\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mopenIdConnectUrl\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"kafka_brokers = KafkaBrokers(brokers={\\\"dev\\\": [kafka_broker], \\\"staging\\\": [sec_kafka_broker]})\\n\",\n    \"pprint(kafka_brokers)\\n\",\n    \"\\n\",\n    \"expected = '{\\\"brokers\\\": 
{\\\"dev-bootstrap-server-0\\\": {\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}, \\\"staging-bootstrap-server-0\\\": {\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka-secure\\\", \\\"security\\\": {\\\"type\\\": \\\"plain\\\"}, \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}}}'\\n\",\n    \"\\n\",\n    \"actual = kafka_brokers.model_dump_json()\\n\",\n    \"print(f\\\"JSON={actual}\\\")\\n\",\n    \"assert actual == expected, actual\\n\",\n    \"\\n\",\n    \"actual = KafkaBrokers.model_validate_json(kafka_brokers.model_dump_json())\\n\",\n    \"pprint(actual)\\n\",\n    \"# assert actual == kafka_brokers\\n\",\n    \"assert actual.model_dump_json() == kafka_brokers.model_dump_json()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d04f1234\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"/tmp/ipykernel_133/2021612751.py:4: PydanticDeprecatedSince20: The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, otherwise load the data then use `model_validate` instead. Deprecated in Pydantic V2.0 to be removed in V3.0. 
See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.0/migration/\\n\",\n      \"  kafka_brokers = KafkaBrokers.parse_raw(brokers_json)\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBrokers</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">brokers</span>=<span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'dev'</span>: <span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span><span style=\\\"font-weight: bold\\\">)</span>,\\n\",\n       \"<span 
style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging'</span>: <span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaBroker</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">port</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">protocol</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka-secure'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">SecuritySchema</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">security_type</span>=<span style=\\\"font-weight: bold\\\">&lt;</span><span style=\\\"color: 
#ff00ff; text-decoration-color: #ff00ff; font-weight: bold\\\">SecurityType.plain:</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\"> </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: bold\\\">&gt;</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">api_key_loc</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">scheme</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">bearerFormat</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">flows</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; 
font-style: italic\\\">None</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">openIdConnectUrl</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-style: italic\\\">None</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1;35mKafkaBrokers\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mbrokers\\u001b[0m=\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'dev'\\u001b[0m: \\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m, \\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m, \\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m, \\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m, \\u001b[33msecurity\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\u001b[1m)\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'staging'\\u001b[0m: \\u001b[1;35mKafkaBroker\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33murl\\u001b[0m=\\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mport\\u001b[0m=\\u001b[32m'9092'\\u001b[0m,\\n\",\n       
\"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33mprotocol\\u001b[0m=\\u001b[32m'kafka-secure'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[33msecurity\\u001b[0m=\\u001b[1;35mSecuritySchema\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33msecurity_type\\u001b[0m=\\u001b[1m<\\u001b[0m\\u001b[1;95mSecurityType.plain:\\u001b[0m\\u001b[39m \\u001b[0m\\u001b[32m'plain'\\u001b[0m\\u001b[1m>\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mapi_key_loc\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mscheme\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mbearerFormat\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mflows\\u001b[0m=\\u001b[3;35mNone\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[33mopenIdConnectUrl\\u001b[0m=\\u001b[3;35mNone\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">KafkaServiceInfo</span><span style=\\\"font-weight: bold\\\">(</span>\\n\",\n       \"<span 
style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">title</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Title'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">version</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'0.0.1'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">description</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Description of the service'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">contact</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">ContactInfo</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #808000; text-decoration-color: #808000\\\">name</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'ACME'</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">url</span>=<span style=\\\"color: #800080; text-decoration-color: #800080; font-weight: bold\\\">Url</span><span style=\\\"font-weight: bold\\\">(</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://www.acme.com/'</span><span style=\\\"font-weight: bold\\\">)</span>, <span style=\\\"color: #808000; text-decoration-color: #808000\\\">email</span>=<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'noreply@acme.com'</span><span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">)</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       
\"\\u001b[1;35mKafkaServiceInfo\\u001b[0m\\u001b[1m(\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mtitle\\u001b[0m=\\u001b[32m'Title'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mversion\\u001b[0m=\\u001b[32m'0.0.1'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mdescription\\u001b[0m=\\u001b[32m'Description of the service'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[33mcontact\\u001b[0m=\\u001b[1;35mContactInfo\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[33mname\\u001b[0m=\\u001b[32m'ACME'\\u001b[0m, \\u001b[33murl\\u001b[0m=\\u001b[1;35mUrl\\u001b[0m\\u001b[1m(\\u001b[0m\\u001b[32m'https://www.acme.com/'\\u001b[0m\\u001b[1m)\\u001b[0m, \\u001b[33memail\\u001b[0m=\\u001b[32m'noreply@acme.com'\\u001b[0m\\u001b[1m)\\u001b[0m\\n\",\n       \"\\u001b[1m)\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'consumers'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_1'</span>: <span style=\\\"font-weight: bold\\\">&lt;</span><span style=\\\"color: #ff00ff; text-decoration-color: #ff00ff; font-weight: bold\\\">function</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\"> on_my_topic_one at </span><span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">0x7f1e585d4540</span><span style=\\\"color: #000000; text-decoration-color: 
#000000\\\">&gt;,</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_2'</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">: &lt;function on_my_topic_2 at </span><span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">0x7f1e585d7380</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">&gt;</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #000000; text-decoration-color: #000000; font-weight: bold\\\">}</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">,</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'producers'</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">: </span><span style=\\\"color: #000000; text-decoration-color: #000000; font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_3'</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">: &lt;function to_my_topic_3 at </span><span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">0x7f1e585d7420</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">&gt;,</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_4'</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">: &lt;function to_my_topic_4 at </span><span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">0x7f1e585d74c0</span><span 
style=\\\"color: #000000; text-decoration-color: #000000\\\">&gt;,</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_5'</span><span style=\\\"color: #000000; text-decoration-color: #000000\\\">: &lt;function to_my_topic_5 at </span><span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">0x7f1e585d7560</span><span style=\\\"font-weight: bold\\\">&gt;</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'consumers'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_1'\\u001b[0m: \\u001b[1m<\\u001b[0m\\u001b[1;95mfunction\\u001b[0m\\u001b[39m on_my_topic_one at \\u001b[0m\\u001b[1;36m0x7f1e585d4540\\u001b[0m\\u001b[39m>,\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_2'\\u001b[0m\\u001b[39m: <function on_my_topic_2 at \\u001b[0m\\u001b[1;36m0x7f1e585d7380\\u001b[0m\\u001b[39m>\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1;39m}\\u001b[0m\\u001b[39m,\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'producers'\\u001b[0m\\u001b[39m: \\u001b[0m\\u001b[1;39m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_3'\\u001b[0m\\u001b[39m: <function to_my_topic_3 at \\u001b[0m\\u001b[1;36m0x7f1e585d7420\\u001b[0m\\u001b[39m>,\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_4'\\u001b[0m\\u001b[39m: <function to_my_topic_4 at \\u001b[0m\\u001b[1;36m0x7f1e585d74c0\\u001b[0m\\u001b[39m>,\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   
\\u001b[0m\\u001b[32m'my_topic_5'\\u001b[0m\\u001b[39m: <function to_my_topic_5 at \\u001b[0m\\u001b[1;36m0x7f1e585d7560\\u001b[0m\\u001b[1m>\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"brokers_json = '{\\\"brokers\\\": {\\\"dev\\\": {\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}, \\\"staging\\\": {\\\"url\\\": \\\"kafka\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka-secure\\\", \\\"security\\\": {\\\"type\\\": \\\"plain\\\"}, \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}}}'\\n\",\n    \"kafka_brokers = KafkaBrokers.parse_raw(brokers_json)\\n\",\n    \"pprint(kafka_brokers)\\n\",\n    \"\\n\",\n    \"my_contact = ContactInfo(\\n\",\n    \"    name=\\\"ACME\\\", url=\\\"https://www.acme.com\\\", email=\\\"noreply@acme.com\\\"\\n\",\n    \")\\n\",\n    \"kafka_service_info = KafkaServiceInfo(contact=my_contact)\\n\",\n    \"pprint(kafka_service_info)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class MyInfo(KafkaMessage):\\n\",\n    \"    mobile: str = Field(..., example=\\\"+385987654321\\\")\\n\",\n    \"    name: str = Field(..., example=\\\"James Bond\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class MyMsgUrl(KafkaMessage):\\n\",\n    \"    info: MyInfo = Field(..., example=dict(mobile=\\\"+385987654321\\\", name=\\\"James Bond\\\"))\\n\",\n    \"    url: HttpUrl = Field(..., example=\\\"https://sis.gov.uk/agents/007\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class MyMsgEmail(KafkaMessage):\\n\",\n    \"    msg_url: MyMsgUrl = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=dict(\\n\",\n    \"            
info=dict(mobile=\\\"+385987654321\\\", name=\\\"James Bond\\\"),\\n\",\n    \"            url=\\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"        ),\\n\",\n    \"    )\\n\",\n    \"    email: EmailStr = Field(..., example=\\\"agent-007@sis.gov.uk\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def on_my_topic_one(msg: MyMsgUrl) -> None:\\n\",\n    \"    raise NotImplemented\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def on_my_topic_2(msg: MyMsgEmail) -> None:\\n\",\n    \"    raise NotImplemented\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def on_my_topic_2_meta(msg: MyMsgEmail, meta: EventMetadata) -> None:\\n\",\n    \"    raise NotImplemented\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def on_my_topic_2_batch(msg: List[MyMsgEmail]) -> None:\\n\",\n    \"    raise NotImplemented\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def to_my_topic_3(msg) -> MyMsgUrl:\\n\",\n    \"    raise NotImplemented\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def to_my_topic_4(msg) -> KafkaEvent[MyMsgEmail]:\\n\",\n    \"    raise NotImplemented\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def to_my_topic_5(msg) -> KafkaEvent[List[MyMsgEmail]]:\\n\",\n    \"    raise NotImplemented\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"consumers = {\\\"my_topic_1\\\": on_my_topic_one, \\\"my_topic_2\\\": on_my_topic_2}\\n\",\n    \"producers = {\\n\",\n    \"    \\\"my_topic_3\\\": to_my_topic_3,\\n\",\n    \"    \\\"my_topic_4\\\": to_my_topic_4,\\n\",\n    \"    \\\"my_topic_5\\\": to_my_topic_5,\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"pprint(dict(consumers=consumers, producers=producers))\\n\",\n    \"assert set(consumers.keys()) == set([\\\"my_topic_1\\\", \\\"my_topic_2\\\"])\\n\",\n    \"assert set(producers.keys()) == set([\\\"my_topic_3\\\", \\\"my_topic_4\\\", \\\"my_topic_5\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"41800426\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | 
export\\n\",\n    \"\\n\",\n    \"# T = TypeVar(\\\"T\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_msg_cls_for_producer(f: ProduceCallable) -> Type[Any]:\\n\",\n    \"    types = get_type_hints(f)\\n\",\n    \"    return_type = types.pop(\\\"return\\\", type(None))\\n\",\n    \"    # @app.producer must define a return value\\n\",\n    \"    if return_type == type(None):\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"Producer function must have a defined return value, got {return_type} as return value\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    return_type = unwrap_from_kafka_event(return_type)\\n\",\n    \"    return_type = unwrap_list_type(return_type)\\n\",\n    \"\\n\",\n    \"    if not hasattr(return_type, \\\"json\\\"):\\n\",\n    \"        raise ValueError(f\\\"Producer function return value must have json method\\\")\\n\",\n    \"    return return_type  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"629071b5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"__main__.MyMsgUrl\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"expected = MyMsgUrl\\n\",\n    \"actual = _get_msg_cls_for_producer(to_my_topic_3)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b5e40210\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"__main__.MyMsgEmail\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"expected = MyMsgEmail\\n\",\n    \"actual = _get_msg_cls_for_producer(to_my_topic_4)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"2df557a2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"__main__.MyMsgEmail\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"expected = MyMsgEmail\\n\",\n    \"actual = _get_msg_cls_for_producer(to_my_topic_5)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"544f8bc2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def no_return(i: int):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    _get_msg_cls_for_producer(no_return)\\n\",\n    \"\\n\",\n    \"assert e.value.args == (\\n\",\n    \"    \\\"Producer function must have a defined return value, got <class 'NoneType'> as return value\\\",\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c428d88c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_msg_cls_for_consumer(f: ConsumeCallable) -> Type[Any]:\\n\",\n    \"    types = get_type_hints(f)\\n\",\n    \"    return_type = types.pop(\\\"return\\\", type(None))\\n\",\n    \"    types_list = list(types.values())\\n\",\n    \"    # @app.consumer does not return a value\\n\",\n    \"    if return_type != type(None):\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"Consumer function cannot return any value, got {return_type}\\\"\\n\",\n    \"        )\\n\",\n    \"    # @app.consumer first consumer argument must be a msg which is a subclass of BaseModel\\n\",\n    \"    try:\\n\",\n    \"        msg_type = types_list[0]\\n\",\n    \"\\n\",\n    \"        msg_type = unwrap_list_type(msg_type)\\n\",\n    \"\\n\",\n    \"        if not 
issubclass(msg_type, BaseModel):\\n\",\n    \"            raise ValueError(\\n\",\n    \"                f\\\"Consumer function first param must be a BaseModel subclass msg, got {types_list}\\\"\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"        return msg_type  # type: ignore\\n\",\n    \"\\n\",\n    \"    except IndexError:\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"Consumer function first param must be a BaseModel subclass msg, got {types_list}\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"eb801415\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"__main__.MyMsgUrl\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"expected = MyMsgUrl\\n\",\n    \"actual = _get_msg_cls_for_consumer(on_my_topic_one)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0d0625f2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"__main__.MyMsgEmail\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"expected = MyMsgEmail\\n\",\n    \"actual = _get_msg_cls_for_consumer(on_my_topic_2_meta)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d701a5c9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"__main__.MyMsgEmail\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"expected = MyMsgEmail\\n\",\n    \"actual = _get_msg_cls_for_consumer(on_my_topic_2_batch)\\n\",\n    \"display(actual)\\n\",\n 
   \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2bcf415f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def no_input():\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    _get_msg_cls_for_consumer(no_input)\\n\",\n    \"\\n\",\n    \"assert e.value.args == (\\n\",\n    \"    \\\"Consumer function first param must be a BaseModel subclass msg, got []\\\",\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def has_return(a: int) -> int:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    _get_msg_cls_for_consumer(has_return)\\n\",\n    \"\\n\",\n    \"assert e.value.args == (\\\"Consumer function cannot return any value, got <class 'int'>\\\",)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fea7bc8c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# |export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_topic_dict(\\n\",\n    \"    f: Callable[[Any], Any],\\n\",\n    \"    direction: str = \\\"publish\\\",\\n\",\n    \") -> Dict[str, Any]:\\n\",\n    \"    if not direction in [\\\"publish\\\", \\\"subscribe\\\"]:\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"direction must be one of ['publish', 'subscribe'], but it is '{direction}'.\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    #     msg_cls = None\\n\",\n    \"\\n\",\n    \"    if direction == \\\"publish\\\":\\n\",\n    \"        msg_cls = _get_msg_cls_for_producer(f)\\n\",\n    \"    elif direction == \\\"subscribe\\\":\\n\",\n    \"        msg_cls = _get_msg_cls_for_consumer(f)\\n\",\n    \"\\n\",\n    \"    msg_schema = {\\\"message\\\": {\\\"$ref\\\": f\\\"#/components/messages/{msg_cls.__name__}\\\"}}\\n\",\n    \"    if hasattr(f, \\\"description\\\"):\\n\",\n    \"        
msg_schema[\\\"description\\\"] = getattr(f, \\\"description\\\")\\n\",\n    \"    elif f.__doc__ is not None:\\n\",\n    \"        msg_schema[\\\"description\\\"] = f.__doc__  # type: ignore\\n\",\n    \"    return {direction: msg_schema}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"51c6a834\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'subscribe'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}}}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\u001b[32m'subscribe'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\\"subscribe\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"}}}\\n\",\n    \"\\n\",\n    \"actual = _get_topic_dict(on_my_topic_2, \\\"subscribe\\\")\\n\",\n    \"pprint(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"41194f50\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}}}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\\"publish\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"}}}\\n\",\n    \"\\n\",\n    \"actual = _get_topic_dict(to_my_topic_4, \\\"publish\\\")\\n\",\n    \"pprint(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"94de7f46\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span 
style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Topic description'</span><span style=\\\"font-weight: bold\\\">}}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m, \\u001b[32m'description'\\u001b[0m: \\u001b[32m'Topic description'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"publish\\\": {\\n\",\n    \"        \\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"},\\n\",\n    \"        \\\"description\\\": \\\"Topic description\\\",\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"setattr(to_my_topic_4, \\\"description\\\", \\\"Topic description\\\")\\n\",\n    \"\\n\",\n    \"actual = _get_topic_dict(to_my_topic_4, \\\"publish\\\")\\n\",\n    \"pprint(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"765e4151\",\n   
\"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_channels_schema(\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \") -> Dict[str, Dict[str, Dict[str, Any]]]:\\n\",\n    \"    topics = {}\\n\",\n    \"    for ms, d in zip([consumers, producers], [\\\"subscribe\\\", \\\"publish\\\"]):\\n\",\n    \"        for topic, f in ms.items():  # type: ignore\\n\",\n    \"            topics[topic] = _get_topic_dict(f, d)\\n\",\n    \"    return topics\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"36d6ec40\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_1'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'subscribe'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgUrl'</span><span style=\\\"font-weight: bold\\\">}}}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_2'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'subscribe'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}}}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_3'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgUrl'</span><span style=\\\"font-weight: bold\\\">}}}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_4'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: 
bold\\\">}</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Topic description'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_5'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}}}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'my_topic_1'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'subscribe'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgUrl'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'my_topic_2'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'subscribe'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n      
 \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'my_topic_3'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgUrl'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'my_topic_4'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m, \\u001b[32m'description'\\u001b[0m: \\u001b[32m'Topic description'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'my_topic_5'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"my_topic_1\\\": {\\n\",\n    \"        \\\"subscribe\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgUrl\\\"}}\\n\",\n    \"    },\\n\",\n    \"    \\\"my_topic_2\\\": {\\n\",\n    \"        \\\"subscribe\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"}}\\n\",\n    \"    },\\n\",\n    \"    \\\"my_topic_3\\\": {\\\"publish\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgUrl\\\"}}},\\n\",\n    \"    \\\"my_topic_4\\\": {\\n\",\n    \"        \\\"publish\\\": 
{\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"}, 'description': 'Topic description'}\\n\",\n    \"    },\\n\",\n    \"    \\\"my_topic_5\\\": {\\n\",\n    \"        \\\"publish\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"}}\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"actual = _get_channels_schema(consumers, producers)\\n\",\n    \"pprint(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0933f70b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_kafka_msg_classes(\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \") -> Set[Type[BaseModel]]:\\n\",\n    \"    fc = [_get_msg_cls_for_consumer(consumer) for consumer in consumers.values()]\\n\",\n    \"    fp = [_get_msg_cls_for_producer(producer) for producer in producers.values()]\\n\",\n    \"    return set(fc + fp)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_kafka_msg_definitions(\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \") -> Dict[str, Dict[str, Any]]:\\n\",\n    \"    msg_classes = _get_kafka_msg_classes(consumers, producers)\\n\",\n    \"    _, msg_definitions = TypeAdapter.json_schemas([(msg_cls, \\\"validation\\\", TypeAdapter(msg_cls)) for msg_cls in msg_classes])\\n\",\n    \"    return msg_definitions\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1aa371c7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"expected = {\\n\",\n    \"    \\\"$defs\\\": {\\n\",\n    \"        \\\"MyInfo\\\": {\\n\",\n    \"            \\\"properties\\\": {\\n\",\n    \"                \\\"mobile\\\": {\\n\",\n    \"                    \\\"example\\\": 
\\\"+385987654321\\\",\\n\",\n    \"                    \\\"title\\\": \\\"Mobile\\\",\\n\",\n    \"                    \\\"type\\\": \\\"string\\\",\\n\",\n    \"                },\\n\",\n    \"                \\\"name\\\": {\\\"example\\\": \\\"James Bond\\\", \\\"title\\\": \\\"Name\\\", \\\"type\\\": \\\"string\\\"},\\n\",\n    \"            },\\n\",\n    \"            \\\"required\\\": [\\\"mobile\\\", \\\"name\\\"],\\n\",\n    \"            \\\"title\\\": \\\"MyInfo\\\",\\n\",\n    \"            \\\"type\\\": \\\"object\\\",\\n\",\n    \"        },\\n\",\n    \"        \\\"MyMsgEmail\\\": {\\n\",\n    \"            \\\"properties\\\": {\\n\",\n    \"                \\\"msg_url\\\": {\\n\",\n    \"                    \\\"allOf\\\": [{\\\"$ref\\\": \\\"#/$defs/MyMsgUrl\\\"}],\\n\",\n    \"                    \\\"example\\\": {\\n\",\n    \"                        \\\"info\\\": {\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                        \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                    },\\n\",\n    \"                },\\n\",\n    \"                \\\"email\\\": {\\n\",\n    \"                    \\\"example\\\": \\\"agent-007@sis.gov.uk\\\",\\n\",\n    \"                    \\\"format\\\": \\\"email\\\",\\n\",\n    \"                    \\\"title\\\": \\\"Email\\\",\\n\",\n    \"                    \\\"type\\\": \\\"string\\\",\\n\",\n    \"                },\\n\",\n    \"            },\\n\",\n    \"            \\\"required\\\": [\\\"msg_url\\\", \\\"email\\\"],\\n\",\n    \"            \\\"title\\\": \\\"MyMsgEmail\\\",\\n\",\n    \"            \\\"type\\\": \\\"object\\\",\\n\",\n    \"        },\\n\",\n    \"        \\\"MyMsgUrl\\\": {\\n\",\n    \"            \\\"properties\\\": {\\n\",\n    \"                \\\"info\\\": {\\n\",\n    \"                    \\\"allOf\\\": [{\\\"$ref\\\": \\\"#/$defs/MyInfo\\\"}],\\n\",\n    \"                    \\\"example\\\": 
{\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                },\\n\",\n    \"                \\\"url\\\": {\\n\",\n    \"                    \\\"example\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                    \\\"format\\\": \\\"uri\\\",\\n\",\n    \"                    \\\"maxLength\\\": 2083,\\n\",\n    \"                    \\\"minLength\\\": 1,\\n\",\n    \"                    \\\"title\\\": \\\"Url\\\",\\n\",\n    \"                    \\\"type\\\": \\\"string\\\",\\n\",\n    \"                },\\n\",\n    \"            },\\n\",\n    \"            \\\"required\\\": [\\\"info\\\", \\\"url\\\"],\\n\",\n    \"            \\\"title\\\": \\\"MyMsgUrl\\\",\\n\",\n    \"            \\\"type\\\": \\\"object\\\",\\n\",\n    \"        },\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"msg_definitions = _get_kafka_msg_definitions(consumers, producers)\\n\",\n    \"assert msg_definitions == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"51b8eca0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_example(cls: Type[BaseModel]) -> BaseModel:\\n\",\n    \"    kwargs: Dict[str, Any] = {}\\n\",\n    \"    for k, v in cls.model_fields.items():\\n\",\n    \"        #         try:\\n\",\n    \"        if hasattr(v, \\\"json_schema_extra\\\") and \\\"example\\\" in v.json_schema_extra:  # type: ignore\\n\",\n    \"            example = v.json_schema_extra[\\\"example\\\"]  # type: ignore\\n\",\n    \"            kwargs[k] = example\\n\",\n    \"    #         except:\\n\",\n    \"    #             pass\\n\",\n    \"    return json.loads(cls(**kwargs).model_dump_json())  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"248bcba6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     
\"output_type\": \"stream\",\n     \"text\": [\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'agent-007@sis.gov.uk'</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'msg_url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'email'\\u001b[0m: \\u001b[32m'agent-007@sis.gov.uk'\\u001b[0m\\n\",\n       \"\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"msg_url\\\": {\\n\",\n    \"        \\\"info\\\": {\\\"name\\\": \\\"James Bond\\\", \\\"mobile\\\": \\\"+385987654321\\\"},\\n\",\n    \"        \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"    },\\n\",\n    \"    \\\"email\\\": \\\"agent-007@sis.gov.uk\\\",\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"actual = _get_example(MyMsgEmail)\\n\",\n    \"pprint(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d9f7df99\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _add_example_to_msg_definitions(\\n\",\n    \"    msg_cls: Type[BaseModel], msg_schema: Dict[str, Dict[str, Any]]\\n\",\n    \") -> None:\\n\",\n    \"    try:\\n\",\n    \"        example = _get_example(msg_cls)\\n\",\n    \"    
except Exception as e:\\n\",\n    \"        example = None\\n\",\n    \"    if example is not None:\\n\",\n    \"        msg_schema[\\\"$defs\\\"][msg_cls.__name__][\\\"example\\\"] = example\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_msg_definitions_with_examples(\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \") -> Dict[str, Dict[str, Any]]:\\n\",\n    \"    msg_classes = _get_kafka_msg_classes(consumers, producers)\\n\",\n    \"    msg_schema: Dict[str, Dict[str, Any]]\\n\",\n    \"    _, msg_schema = TypeAdapter.json_schemas([(msg_cls, \\\"validation\\\", TypeAdapter(msg_cls)) for msg_cls in msg_classes])\\n\",\n    \"    for msg_cls in msg_classes:\\n\",\n    \"        _add_example_to_msg_definitions(msg_cls, msg_schema)\\n\",\n    \"    msg_schema = (\\n\",\n    \"        {k: {\\\"payload\\\": v} for k, v in msg_schema[\\\"$defs\\\"].items()}\\n\",\n    \"        if \\\"$defs\\\" in msg_schema\\n\",\n    \"        else {}\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    return msg_schema\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"841e0eb0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'MyInfo'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Mobile'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Name'</span>, <span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyInfo'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgEmail'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span 
style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'allOf'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/$defs/MyMsgUrl'</span><span style=\\\"font-weight: bold\\\">}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'agent-007@sis.gov.uk'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'format'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Email'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span 
style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgEmail'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'agent-007@sis.gov.uk'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgUrl'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span 
style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'allOf'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/$defs/MyInfo'</span><span style=\\\"font-weight: bold\\\">}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'format'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'uri'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'maxLength'</span>: <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">2083</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'minLength'</span>: <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">1</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Url'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span 
style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgUrl'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'MyInfo'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'title'\\u001b[0m: \\u001b[32m'Mobile'\\u001b[0m, \\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'name'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m, \\u001b[32m'title'\\u001b[0m: \\u001b[32m'Name'\\u001b[0m, \\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'mobile'\\u001b[0m, \\u001b[32m'name'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyInfo'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   
\\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'MyMsgEmail'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'msg_url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'allOf'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/$defs/MyMsgUrl'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'email'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'agent-007@sis.gov.uk'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'format'\\u001b[0m: \\u001b[32m'email'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: 
\\u001b[32m'Email'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'msg_url'\\u001b[0m, \\u001b[32m'email'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyMsgEmail'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'msg_url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'email'\\u001b[0m: \\u001b[32m'agent-007@sis.gov.uk'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'MyMsgUrl'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   
\\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'allOf'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/$defs/MyInfo'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'format'\\u001b[0m: \\u001b[32m'uri'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'maxLength'\\u001b[0m: \\u001b[1;36m2083\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'minLength'\\u001b[0m: \\u001b[1;36m1\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'Url'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'info'\\u001b[0m, \\u001b[32m'url'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   
\\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyMsgUrl'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"MyInfo\\\": {\\n\",\n    \"        \\\"payload\\\": {\\n\",\n    \"            \\\"properties\\\": {\\n\",\n    \"                \\\"mobile\\\": {\\n\",\n    \"                    \\\"example\\\": \\\"+385987654321\\\",\\n\",\n    \"                    \\\"title\\\": \\\"Mobile\\\",\\n\",\n    \"                    \\\"type\\\": \\\"string\\\",\\n\",\n    \"                },\\n\",\n    \"                \\\"name\\\": {\\\"example\\\": \\\"James Bond\\\", \\\"title\\\": \\\"Name\\\", \\\"type\\\": \\\"string\\\"},\\n\",\n    \"            },\\n\",\n    \"            \\\"required\\\": [\\\"mobile\\\", \\\"name\\\"],\\n\",\n    \"            \\\"title\\\": \\\"MyInfo\\\",\\n\",\n    \"            \\\"type\\\": \\\"object\\\",\\n\",\n    \"        }\\n\",\n    \"    },\\n\",\n    \"    \\\"MyMsgEmail\\\": {\\n\",\n    \"        \\\"payload\\\": {\\n\",\n  
  \"            \\\"properties\\\": {\\n\",\n    \"                \\\"msg_url\\\": {\\n\",\n    \"                    \\\"allOf\\\": [{\\\"$ref\\\": \\\"#/$defs/MyMsgUrl\\\"}],\\n\",\n    \"                    \\\"example\\\": {\\n\",\n    \"                        \\\"info\\\": {\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                        \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                    },\\n\",\n    \"                },\\n\",\n    \"                \\\"email\\\": {\\n\",\n    \"                    \\\"example\\\": \\\"agent-007@sis.gov.uk\\\",\\n\",\n    \"                    \\\"format\\\": \\\"email\\\",\\n\",\n    \"                    \\\"title\\\": \\\"Email\\\",\\n\",\n    \"                    \\\"type\\\": \\\"string\\\",\\n\",\n    \"                },\\n\",\n    \"            },\\n\",\n    \"            \\\"required\\\": [\\\"msg_url\\\", \\\"email\\\"],\\n\",\n    \"            \\\"title\\\": \\\"MyMsgEmail\\\",\\n\",\n    \"            \\\"type\\\": \\\"object\\\",\\n\",\n    \"            \\\"example\\\": {\\n\",\n    \"                \\\"msg_url\\\": {\\n\",\n    \"                    \\\"info\\\": {\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                    \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                },\\n\",\n    \"                \\\"email\\\": \\\"agent-007@sis.gov.uk\\\",\\n\",\n    \"            },\\n\",\n    \"        }\\n\",\n    \"    },\\n\",\n    \"    \\\"MyMsgUrl\\\": {\\n\",\n    \"        \\\"payload\\\": {\\n\",\n    \"            \\\"properties\\\": {\\n\",\n    \"                \\\"info\\\": {\\n\",\n    \"                    \\\"allOf\\\": [{\\\"$ref\\\": \\\"#/$defs/MyInfo\\\"}],\\n\",\n    \"                    \\\"example\\\": {\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                },\\n\",\n    \"              
  \\\"url\\\": {\\n\",\n    \"                    \\\"example\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                    \\\"format\\\": \\\"uri\\\",\\n\",\n    \"                    \\\"maxLength\\\": 2083,\\n\",\n    \"                    \\\"minLength\\\": 1,\\n\",\n    \"                    \\\"title\\\": \\\"Url\\\",\\n\",\n    \"                    \\\"type\\\": \\\"string\\\",\\n\",\n    \"                },\\n\",\n    \"            },\\n\",\n    \"            \\\"required\\\": [\\\"info\\\", \\\"url\\\"],\\n\",\n    \"            \\\"title\\\": \\\"MyMsgUrl\\\",\\n\",\n    \"            \\\"type\\\": \\\"object\\\",\\n\",\n    \"            \\\"example\\\": {\\n\",\n    \"                \\\"info\\\": {\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"            },\\n\",\n    \"        }\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"actual = _get_msg_definitions_with_examples(consumers, producers)\\n\",\n    \"pprint(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"37bfd1fe\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_security_schemes(kafka_brokers: KafkaBrokers) -> Dict[str, Any]:\\n\",\n    \"    security_schemes = {}\\n\",\n    \"    for key, broker in kafka_brokers.brokers.items():\\n\",\n    \"        if isinstance(broker, list):\\n\",\n    \"            kafka_broker = broker[0]\\n\",\n    \"        else:\\n\",\n    \"            kafka_broker = broker\\n\",\n    \"\\n\",\n    \"        if kafka_broker.security is not None:\\n\",\n    \"            security_schemes[f\\\"{key}_default_security\\\"] = json.loads(\\n\",\n    \"                kafka_broker.security.model_dump_json()\\n\",\n    \"            )\\n\",\n   
 \"    return security_schemes\"\n   ]\n  },\n  {\n   \"cell_type\": \"raw\",\n   \"id\": \"13132265\",\n   \"metadata\": {},\n   \"source\": [\n    \"expected = {\\n\",\n    \"    \\\"staging_default_security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    \\\"production_default_security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"brokers = KafkaBrokers(\\n\",\n    \"    brokers={\\n\",\n    \"        \\\"dev\\\": KafkaBroker(\\n\",\n    \"            url=\\\"localhost\\\",\\n\",\n    \"            description=\\\"dev\\\",\\n\",\n    \"            port=\\\"9092\\\",\\n\",\n    \"        )\\n\",\n    \"    }\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"display(brokers)\\n\",\n    \"\\n\",\n    \"actual = _get_security_schemes(brokers)\\n\",\n    \"pprint(actual)\\n\",\n    \"assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"eeb0e316\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_components_schema(\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \"    kafka_brokers: KafkaBrokers,\\n\",\n    \") -> Dict[str, Any]:\\n\",\n    \"    definitions = _get_msg_definitions_with_examples(consumers, producers)\\n\",\n    \"    msg_classes = [cls.__name__ for cls in _get_kafka_msg_classes(consumers, producers)]\\n\",\n    \"    components = {\\n\",\n    \"        \\\"messages\\\": {k: v for k, v in definitions.items() if k in msg_classes},\\n\",\n    \"        \\\"schemas\\\": {k: v for k, v in definitions.items() if k not in msg_classes},\\n\",\n    \"        \\\"securitySchemes\\\": _get_security_schemes(kafka_brokers),\\n\",\n    \"    }\\n\",\n    \"    substitutions = {\\n\",\n    \"        f\\\"#/$defs/{k}\\\": f\\\"#/components/messages/{k}\\\"\\n\",\n    \"        if k in msg_classes\\n\",\n    \"      
  else f\\\"#/components/schemas/{k}\\\"\\n\",\n    \"        for k in definitions.keys()\\n\",\n    \"    }\\n\",\n    \"\\n\",\n    \"    def _sub_values(d: Any, substitutions: Dict[str, str] = substitutions) -> Any:\\n\",\n    \"        if isinstance(d, dict):\\n\",\n    \"            d = {k: _sub_values(v) for k, v in d.items()}\\n\",\n    \"        if isinstance(d, list):\\n\",\n    \"            d = [_sub_values(k) for k in d]\\n\",\n    \"        elif isinstance(d, str):\\n\",\n    \"            for k, v in substitutions.items():\\n\",\n    \"                if d == k:\\n\",\n    \"                    d = v\\n\",\n    \"        return d\\n\",\n    \"\\n\",\n    \"    return _sub_values(components)  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"df0128e3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'messages'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgEmail'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: 
#7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'allOf'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgUrl'</span><span style=\\\"font-weight: bold\\\">}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span 
style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'agent-007@sis.gov.uk'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'format'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Email'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'string'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgEmail'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'agent-007@sis.gov.uk'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgUrl'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│ 
  │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'allOf'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/schemas/MyInfo'</span><span style=\\\"font-weight: bold\\\">}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   
│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'format'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'uri'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'maxLength'</span>: <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">2083</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'minLength'</span>: <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">1</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Url'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span 
style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgUrl'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James 
Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'schemas'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyInfo'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span 
style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Mobile'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Name'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>, <span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'name'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyInfo'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'securitySchemes'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging_default_security'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: bold\\\">}}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'messages'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'MyMsgEmail'\\u001b[0m: 
\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'msg_url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'allOf'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgUrl'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'email'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'agent-007@sis.gov.uk'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'format'\\u001b[0m: \\u001b[32m'email'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'Email'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   
\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'msg_url'\\u001b[0m, \\u001b[32m'email'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyMsgEmail'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'msg_url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'email'\\u001b[0m: \\u001b[32m'agent-007@sis.gov.uk'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'MyMsgUrl'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   
\\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'allOf'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/schemas/MyInfo'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'format'\\u001b[0m: \\u001b[32m'uri'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'maxLength'\\u001b[0m: \\u001b[1;36m2083\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'minLength'\\u001b[0m: \\u001b[1;36m1\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'Url'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'info'\\u001b[0m, \\u001b[32m'url'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: 
\\u001b[32m'MyMsgUrl'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'schemas'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'MyInfo'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'title'\\u001b[0m: \\u001b[32m'Mobile'\\u001b[0m, \\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'name'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m, \\u001b[32m'title'\\u001b[0m: \\u001b[32m'Name'\\u001b[0m, \\u001b[32m'type'\\u001b[0m: 
\\u001b[32m'string'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'mobile'\\u001b[0m, \\u001b[32m'name'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyInfo'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'securitySchemes'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'staging_default_security'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'plain'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"components = _get_components_schema(consumers, producers, kafka_brokers)\\n\",\n    \"pprint(components)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f4debb61\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_servers_schema(kafka_brokers: KafkaBrokers) -> Dict[str, Any]:\\n\",\n    \"    servers = json.loads(kafka_brokers.model_dump_json(sort_keys=False))[\\\"brokers\\\"]\\n\",\n    \"\\n\",\n    \"    for key, kafka_broker in servers.items():\\n\",\n    \"        if \\\"security\\\" in kafka_broker:\\n\",\n    \"            servers[key][\\\"security\\\"] = [{f\\\"{key}_default_security\\\": []}]\\n\",\n    \"    return 
servers  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"486f09df\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'dev'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'protocol'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'variables'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'port'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'default'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span><span style=\\\"font-weight: 
bold\\\">}}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'protocol'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka-secure'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'security'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging_default_security'</span>: <span style=\\\"font-weight: bold\\\">[]}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'variables'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'port'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'default'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span><span style=\\\"font-weight: bold\\\">}}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'dev'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'description'\\u001b[0m: \\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'protocol'\\u001b[0m: \\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'variables'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'port'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'default'\\u001b[0m: \\u001b[32m'9092'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'staging'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'description'\\u001b[0m: \\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'protocol'\\u001b[0m: \\u001b[32m'kafka-secure'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'security'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'staging_default_security'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m]\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'variables'\\u001b[0m: 
\\u001b[1m{\\u001b[0m\\u001b[32m'port'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'default'\\u001b[0m: \\u001b[32m'9092'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"dev\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka\\\",\\n\",\n    \"        \\\"description\\\": \\\"Kafka broker\\\",\\n\",\n    \"        \\\"protocol\\\": \\\"kafka\\\",\\n\",\n    \"        \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}},\\n\",\n    \"    },\\n\",\n    \"    \\\"staging\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka\\\",\\n\",\n    \"        \\\"description\\\": \\\"Kafka broker\\\",\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": [{\\\"staging_default_security\\\": []}],\\n\",\n    \"        \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"actual = _get_servers_schema(kafka_brokers)\\n\",\n    \"pprint(actual)\\n\",\n    \"assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"65365b1d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_asyncapi_schema(\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \"    kafka_brokers: KafkaBrokers,\\n\",\n    \"    kafka_service_info: KafkaServiceInfo,\\n\",\n    \") -> Dict[str, Any]:\\n\",\n    \"    #     # we don't use dict because we need custom JSON encoders\\n\",\n    \"    info = json.loads(kafka_service_info.model_dump_json())\\n\",\n    \"    
servers = _get_servers_schema(kafka_brokers)\\n\",\n    \"    #     # should be in the proper format already\\n\",\n    \"    channels = _get_channels_schema(consumers, producers)\\n\",\n    \"    components = _get_components_schema(consumers, producers, kafka_brokers)\\n\",\n    \"    return {\\n\",\n    \"        \\\"asyncapi\\\": \\\"2.5.0\\\",\\n\",\n    \"        \\\"info\\\": info,\\n\",\n    \"        \\\"servers\\\": servers,\\n\",\n    \"        \\\"channels\\\": channels,\\n\",\n    \"        \\\"components\\\": components,\\n\",\n    \"    }\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"52fa12e4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'asyncapi'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'2.5.0'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: 
<span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Title'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'version'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'0.0.1'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Description of the service'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'contact'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'ACME'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://www.acme.com/'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'noreply@acme.com'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'servers'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'dev'</span>: <span 
style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'protocol'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'variables'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'port'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'default'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span><span style=\\\"font-weight: bold\\\">}}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span 
style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Kafka broker'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'protocol'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'kafka-secure'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'security'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging_default_security'</span>: <span style=\\\"font-weight: bold\\\">[]}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'variables'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'port'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'default'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'9092'</span><span style=\\\"font-weight: bold\\\">}}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span 
style=\\\"color: #008000; text-decoration-color: #008000\\\">'channels'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_1'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'subscribe'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgUrl'</span><span style=\\\"font-weight: bold\\\">}}}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_2'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'subscribe'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}}}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_3'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgUrl'</span><span style=\\\"font-weight: bold\\\">}}}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'my_topic_4'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'description'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Topic description'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'my_topic_5'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'publish'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'message'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgEmail'</span><span style=\\\"font-weight: bold\\\">}}}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'components'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'messages'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgEmail'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: 
#7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'allOf'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/messages/MyMsgUrl'</span><span style=\\\"font-weight: bold\\\">}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span 
style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'agent-007@sis.gov.uk'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'format'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Email'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: 
#7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgEmail'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'msg_url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: 
#008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'email'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'agent-007@sis.gov.uk'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgUrl'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: 
bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'allOf'</span>: <span style=\\\"font-weight: bold\\\">[{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'$ref'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'#/components/schemas/MyInfo'</span><span style=\\\"font-weight: bold\\\">}]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span 
style=\\\"color: #008000; text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'format'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'uri'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'maxLength'</span>: <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">2083</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'minLength'</span>: <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">1</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Url'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span 
style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyMsgUrl'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'info'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'url'</span>: <span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'https://sis.gov.uk/agents/007'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'schemas'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyInfo'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'payload'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'properties'</span>: <span style=\\\"font-weight: bold\\\">{</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'+385987654321'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Mobile'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'example'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'James Bond'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'Name'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'string'</span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'required'</span>: <span style=\\\"font-weight: bold\\\">[</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'mobile'</span>, <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'name'</span><span style=\\\"font-weight: bold\\\">]</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; 
text-decoration-color: #008000\\\">'title'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'MyInfo'</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'object'</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   │   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"font-weight: bold\\\">}</span>,\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   │   </span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'securitySchemes'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'staging_default_security'</span>: <span style=\\\"font-weight: bold\\\">{</span><span style=\\\"color: #008000; text-decoration-color: #008000\\\">'type'</span>: <span style=\\\"color: #008000; text-decoration-color: #008000\\\">'plain'</span><span style=\\\"font-weight: bold\\\">}}</span>\\n\",\n       \"<span style=\\\"color: #7fbf7f; text-decoration-color: #7fbf7f\\\">│   </span><span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">}</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'asyncapi'\\u001b[0m: \\u001b[32m'2.5.0'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   
\\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'Title'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'version'\\u001b[0m: \\u001b[32m'0.0.1'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'description'\\u001b[0m: \\u001b[32m'Description of the service'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'contact'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'name'\\u001b[0m: \\u001b[32m'ACME'\\u001b[0m, \\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://www.acme.com/'\\u001b[0m, \\u001b[32m'email'\\u001b[0m: \\u001b[32m'noreply@acme.com'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'servers'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'dev'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'description'\\u001b[0m: \\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'protocol'\\u001b[0m: \\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'variables'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'port'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'default'\\u001b[0m: \\u001b[32m'9092'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'staging'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'kafka'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'description'\\u001b[0m: \\u001b[32m'Kafka broker'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'protocol'\\u001b[0m: 
\\u001b[32m'kafka-secure'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'security'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'staging_default_security'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m]\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'variables'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'port'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'default'\\u001b[0m: \\u001b[32m'9092'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'channels'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_1'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'subscribe'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgUrl'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_2'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'subscribe'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_3'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgUrl'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_4'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   
│   │   \\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'description'\\u001b[0m: \\u001b[32m'Topic description'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'my_topic_5'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'publish'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'message'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgEmail'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[32m'components'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'messages'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'MyMsgEmail'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'msg_url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'allOf'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/messages/MyMsgUrl'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   
\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'email'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'agent-007@sis.gov.uk'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'format'\\u001b[0m: \\u001b[32m'email'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'Email'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'msg_url'\\u001b[0m, \\u001b[32m'email'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyMsgEmail'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   
\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'msg_url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'email'\\u001b[0m: \\u001b[32m'agent-007@sis.gov.uk'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'MyMsgUrl'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'allOf'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[1m{\\u001b[0m\\u001b[32m'$ref'\\u001b[0m: \\u001b[32m'#/components/schemas/MyInfo'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James 
Bond'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'format'\\u001b[0m: \\u001b[32m'uri'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'maxLength'\\u001b[0m: \\u001b[1;36m2083\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'minLength'\\u001b[0m: \\u001b[1;36m1\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'Url'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'info'\\u001b[0m, \\u001b[32m'url'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyMsgUrl'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'info'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'name'\\u001b[0m: \\u001b[32m'James 
Bond'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'url'\\u001b[0m: \\u001b[32m'https://sis.gov.uk/agents/007'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'schemas'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[32m'MyInfo'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[32m'payload'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'properties'\\u001b[0m: \\u001b[1m{\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'mobile'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'+385987654321'\\u001b[0m, \\u001b[32m'title'\\u001b[0m: \\u001b[32m'Mobile'\\u001b[0m, \\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   │   \\u001b[0m\\u001b[32m'name'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'example'\\u001b[0m: \\u001b[32m'James Bond'\\u001b[0m, \\u001b[32m'title'\\u001b[0m: \\u001b[32m'Name'\\u001b[0m, \\u001b[32m'type'\\u001b[0m: \\u001b[32m'string'\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'required'\\u001b[0m: \\u001b[1m[\\u001b[0m\\u001b[32m'mobile'\\u001b[0m, \\u001b[32m'name'\\u001b[0m\\u001b[1m]\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   │   \\u001b[0m\\u001b[32m'title'\\u001b[0m: \\u001b[32m'MyInfo'\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   │   │   
│   \\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'object'\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   │   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[1m}\\u001b[0m,\\n\",\n       \"\\u001b[2;32m│   │   \\u001b[0m\\u001b[32m'securitySchemes'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'staging_default_security'\\u001b[0m: \\u001b[1m{\\u001b[0m\\u001b[32m'type'\\u001b[0m: \\u001b[32m'plain'\\u001b[0m\\u001b[1m}\\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[2;32m│   \\u001b[0m\\u001b[1m}\\u001b[0m\\n\",\n       \"\\u001b[1m}\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | output: false\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"asyncapi\\\": \\\"2.5.0\\\",\\n\",\n    \"    \\\"info\\\": {\\n\",\n    \"        \\\"title\\\": \\\"Title\\\",\\n\",\n    \"        \\\"version\\\": \\\"0.0.1\\\",\\n\",\n    \"        \\\"description\\\": \\\"Description of the service\\\",\\n\",\n    \"        \\\"contact\\\": {\\n\",\n    \"            \\\"name\\\": \\\"ACME\\\",\\n\",\n    \"            \\\"url\\\": \\\"https://www.acme.com/\\\",\\n\",\n    \"            \\\"email\\\": \\\"noreply@acme.com\\\",\\n\",\n    \"        },\\n\",\n    \"    },\\n\",\n    \"    \\\"servers\\\": {\\n\",\n    \"        \\\"dev\\\": {\\n\",\n    \"            \\\"url\\\": \\\"kafka\\\",\\n\",\n    \"            \\\"description\\\": \\\"Kafka broker\\\",\\n\",\n    \"            \\\"protocol\\\": \\\"kafka\\\",\\n\",\n    \"            \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}},\\n\",\n    \"        },\\n\",\n    \"        \\\"staging\\\": {\\n\",\n    \"            \\\"url\\\": \\\"kafka\\\",\\n\",\n    \"            \\\"description\\\": \\\"Kafka broker\\\",\\n\",\n    \"            \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    
\"            \\\"security\\\": [{\\\"staging_default_security\\\": []}],\\n\",\n    \"            \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}},\\n\",\n    \"        },\\n\",\n    \"    },\\n\",\n    \"    \\\"channels\\\": {\\n\",\n    \"        \\\"my_topic_1\\\": {\\n\",\n    \"            \\\"subscribe\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgUrl\\\"}}\\n\",\n    \"        },\\n\",\n    \"        \\\"my_topic_2\\\": {\\n\",\n    \"            \\\"subscribe\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"}}\\n\",\n    \"        },\\n\",\n    \"        \\\"my_topic_3\\\": {\\n\",\n    \"            \\\"publish\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgUrl\\\"}}\\n\",\n    \"        },\\n\",\n    \"        \\\"my_topic_4\\\": {\\n\",\n    \"            \\\"publish\\\": {\\n\",\n    \"                \\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"},\\n\",\n    \"                \\\"description\\\": \\\"Topic description\\\",\\n\",\n    \"            }\\n\",\n    \"        },\\n\",\n    \"        \\\"my_topic_5\\\": {\\n\",\n    \"            \\\"publish\\\": {\\\"message\\\": {\\\"$ref\\\": \\\"#/components/messages/MyMsgEmail\\\"}}\\n\",\n    \"        },\\n\",\n    \"    },\\n\",\n    \"    \\\"components\\\": {\\n\",\n    \"        \\\"messages\\\": {\\n\",\n    \"            \\\"MyMsgEmail\\\": {\\n\",\n    \"                \\\"payload\\\": {\\n\",\n    \"                    \\\"properties\\\": {\\n\",\n    \"                        \\\"msg_url\\\": {\\n\",\n    \"                            \\\"allOf\\\": [{\\\"$ref\\\": \\\"#/components/messages/MyMsgUrl\\\"}],\\n\",\n    \"                            \\\"example\\\": {\\n\",\n    \"                                \\\"info\\\": {\\n\",\n    \"                                    \\\"mobile\\\": \\\"+385987654321\\\",\\n\",\n    \"                                    \\\"name\\\": 
\\\"James Bond\\\",\\n\",\n    \"                                },\\n\",\n    \"                                \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                            },\\n\",\n    \"                        },\\n\",\n    \"                        \\\"email\\\": {\\n\",\n    \"                            \\\"example\\\": \\\"agent-007@sis.gov.uk\\\",\\n\",\n    \"                            \\\"format\\\": \\\"email\\\",\\n\",\n    \"                            \\\"title\\\": \\\"Email\\\",\\n\",\n    \"                            \\\"type\\\": \\\"string\\\",\\n\",\n    \"                        },\\n\",\n    \"                    },\\n\",\n    \"                    \\\"required\\\": [\\\"msg_url\\\", \\\"email\\\"],\\n\",\n    \"                    \\\"title\\\": \\\"MyMsgEmail\\\",\\n\",\n    \"                    \\\"type\\\": \\\"object\\\",\\n\",\n    \"                    \\\"example\\\": {\\n\",\n    \"                        \\\"msg_url\\\": {\\n\",\n    \"                            \\\"info\\\": {\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                            \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                        },\\n\",\n    \"                        \\\"email\\\": \\\"agent-007@sis.gov.uk\\\",\\n\",\n    \"                    },\\n\",\n    \"                }\\n\",\n    \"            },\\n\",\n    \"            \\\"MyMsgUrl\\\": {\\n\",\n    \"                \\\"payload\\\": {\\n\",\n    \"                    \\\"properties\\\": {\\n\",\n    \"                        \\\"info\\\": {\\n\",\n    \"                            \\\"allOf\\\": [{\\\"$ref\\\": \\\"#/components/schemas/MyInfo\\\"}],\\n\",\n    \"                            \\\"example\\\": {\\n\",\n    \"                                \\\"mobile\\\": \\\"+385987654321\\\",\\n\",\n    \"                                \\\"name\\\": \\\"James Bond\\\",\\n\",\n    \"       
                     },\\n\",\n    \"                        },\\n\",\n    \"                        \\\"url\\\": {\\n\",\n    \"                            \\\"example\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                            \\\"format\\\": \\\"uri\\\",\\n\",\n    \"                            \\\"maxLength\\\": 2083,\\n\",\n    \"                            \\\"minLength\\\": 1,\\n\",\n    \"                            \\\"title\\\": \\\"Url\\\",\\n\",\n    \"                            \\\"type\\\": \\\"string\\\",\\n\",\n    \"                        },\\n\",\n    \"                    },\\n\",\n    \"                    \\\"required\\\": [\\\"info\\\", \\\"url\\\"],\\n\",\n    \"                    \\\"title\\\": \\\"MyMsgUrl\\\",\\n\",\n    \"                    \\\"type\\\": \\\"object\\\",\\n\",\n    \"                    \\\"example\\\": {\\n\",\n    \"                        \\\"info\\\": {\\\"mobile\\\": \\\"+385987654321\\\", \\\"name\\\": \\\"James Bond\\\"},\\n\",\n    \"                        \\\"url\\\": \\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"                    },\\n\",\n    \"                }\\n\",\n    \"            },\\n\",\n    \"        },\\n\",\n    \"        \\\"schemas\\\": {\\n\",\n    \"            \\\"MyInfo\\\": {\\n\",\n    \"                \\\"payload\\\": {\\n\",\n    \"                    \\\"properties\\\": {\\n\",\n    \"                        \\\"mobile\\\": {\\n\",\n    \"                            \\\"example\\\": \\\"+385987654321\\\",\\n\",\n    \"                            \\\"title\\\": \\\"Mobile\\\",\\n\",\n    \"                            \\\"type\\\": \\\"string\\\",\\n\",\n    \"                        },\\n\",\n    \"                        \\\"name\\\": {\\n\",\n    \"                            \\\"example\\\": \\\"James Bond\\\",\\n\",\n    \"                            \\\"title\\\": \\\"Name\\\",\\n\",\n    \"                            \\\"type\\\": 
\\\"string\\\",\\n\",\n    \"                        },\\n\",\n    \"                    },\\n\",\n    \"                    \\\"required\\\": [\\\"mobile\\\", \\\"name\\\"],\\n\",\n    \"                    \\\"title\\\": \\\"MyInfo\\\",\\n\",\n    \"                    \\\"type\\\": \\\"object\\\",\\n\",\n    \"                }\\n\",\n    \"            }\\n\",\n    \"        },\\n\",\n    \"        \\\"securitySchemes\\\": {\\\"staging_default_security\\\": {\\\"type\\\": \\\"plain\\\"}},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"asyncapi_schema = _get_asyncapi_schema(\\n\",\n    \"    consumers, producers, kafka_brokers, kafka_service_info\\n\",\n    \")\\n\",\n    \"pprint(asyncapi_schema)\\n\",\n    \"assert asyncapi_schema == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3df0720d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# |export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def yaml_file_cmp(file_1: Union[Path, str], file_2: Union[Path, str]) -> bool:\\n\",\n    \"    \\\"\\\"\\\"Compares two YAML files and returns True if their contents are equal, False otherwise.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        file_1: Path or string representing the first YAML file.\\n\",\n    \"        file_2: Path or string representing the second YAML file.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A boolean indicating whether the contents of the two YAML files are equal.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        import yaml\\n\",\n    \"    except Exception as e:\\n\",\n    \"        msg = \\\"Please install docs version of fastkafka using 'pip install fastkafka[docs]' command\\\"\\n\",\n    \"        logger.error(msg)\\n\",\n    \"        raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"    def _read(f: Union[Path, str]) -> Dict[str, Any]:\\n\",\n    \"        with open(f) as stream:\\n\",\n    \"            return 
yaml.safe_load(stream)  # type: ignore\\n\",\n    \"\\n\",\n    \"    d = [_read(f) for f in [file_1, file_2]]\\n\",\n    \"    return d[0] == d[1]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b1bd1bd2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _generate_async_spec(\\n\",\n    \"    *,\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \"    kafka_brokers: KafkaBrokers,\\n\",\n    \"    kafka_service_info: KafkaServiceInfo,\\n\",\n    \"    spec_path: Path,\\n\",\n    \"    force_rebuild: bool,\\n\",\n    \") -> bool:\\n\",\n    \"    try:\\n\",\n    \"        import yaml\\n\",\n    \"    except Exception as e:\\n\",\n    \"        msg = \\\"Please install docs version of fastkafka using 'pip install fastkafka[docs]' command\\\"\\n\",\n    \"        logger.error(msg)\\n\",\n    \"        raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"    # generate spec file\\n\",\n    \"    asyncapi_schema = _get_asyncapi_schema(\\n\",\n    \"        consumers, producers, kafka_brokers, kafka_service_info\\n\",\n    \"    )\\n\",\n    \"    if not spec_path.exists():\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"Old async specifications at '{spec_path.resolve()}' does not exist.\\\"\\n\",\n    \"        )\\n\",\n    \"    spec_path.parent.mkdir(exist_ok=True, parents=True)\\n\",\n    \"    with tempfile.TemporaryDirectory() as d:\\n\",\n    \"        with open(Path(d) / \\\"asyncapi.yml\\\", \\\"w\\\") as f:\\n\",\n    \"            yaml.dump(asyncapi_schema, f, sort_keys=False)\\n\",\n    \"        spec_changed = not (\\n\",\n    \"            spec_path.exists() and yaml_file_cmp(Path(d) / \\\"asyncapi.yml\\\", spec_path)\\n\",\n    \"        )\\n\",\n    \"        if spec_changed or force_rebuild:\\n\",\n    \"            shutil.copyfile(Path(d) / 
\\\"asyncapi.yml\\\", spec_path)\\n\",\n    \"            logger.info(\\n\",\n    \"                f\\\"New async specifications generated at: '{spec_path.resolve()}'\\\"\\n\",\n    \"            )\\n\",\n    \"            return True\\n\",\n    \"        else:\\n\",\n    \"            logger.info(\\n\",\n    \"                f\\\"Keeping the old async specifications at: '{spec_path.resolve()}'\\\"\\n\",\n    \"            )\\n\",\n    \"            return False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c91dc3a4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\",\n      \"[INFO] __main__: Old async specifications at '/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml' does not exist.\\n\",\n      \"[INFO] __main__: New async specifications generated at: '/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml'\\n\",\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\",\n      \"[INFO] __main__: Keeping the old async specifications at: '/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml'\\n\",\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\",\n      \"[INFO] __main__: New async specifications generated at: 
'/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml'\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with tempfile.TemporaryDirectory() as d:\\n\",\n    \"    try:\\n\",\n    \"        asyncapi_path = Path(d).parent / \\\"003_AsyncAPI\\\" / \\\"asyncapi\\\"\\n\",\n    \"        if asyncapi_path.exists():\\n\",\n    \"            shutil.rmtree(asyncapi_path)\\n\",\n    \"        spec_path = Path(asyncapi_path) / \\\"spec\\\" / \\\"asyncapi.yml\\\"\\n\",\n    \"\\n\",\n    \"        is_spec_built = _generate_async_spec(\\n\",\n    \"            consumers=consumers,\\n\",\n    \"            producers=producers,\\n\",\n    \"            kafka_brokers=kafka_brokers,\\n\",\n    \"            kafka_service_info=kafka_service_info,\\n\",\n    \"            spec_path=spec_path,\\n\",\n    \"            force_rebuild=False,\\n\",\n    \"        )\\n\",\n    \"        assert is_spec_built\\n\",\n    \"        assert (Path(asyncapi_path) / \\\"spec\\\" / \\\"asyncapi.yml\\\").exists()\\n\",\n    \"\\n\",\n    \"        is_spec_built = _generate_async_spec(\\n\",\n    \"            consumers=consumers,\\n\",\n    \"            producers=producers,\\n\",\n    \"            kafka_brokers=kafka_brokers,\\n\",\n    \"            kafka_service_info=kafka_service_info,\\n\",\n    \"            spec_path=spec_path,\\n\",\n    \"            force_rebuild=False,\\n\",\n    \"        )\\n\",\n    \"        assert not is_spec_built\\n\",\n    \"        assert (Path(asyncapi_path) / \\\"spec\\\" / \\\"asyncapi.yml\\\").exists()\\n\",\n    \"\\n\",\n    \"        is_spec_built = _generate_async_spec(\\n\",\n    \"            consumers=consumers,\\n\",\n    \"            producers=producers,\\n\",\n    \"            kafka_brokers=kafka_brokers,\\n\",\n    \"            kafka_service_info=kafka_service_info,\\n\",\n    \"            spec_path=spec_path,\\n\",\n    \"            force_rebuild=True,\\n\",\n    \"        )\\n\",\n    \"        assert is_spec_built\\n\",\n    \"        assert 
(Path(asyncapi_path) / \\\"spec\\\" / \\\"asyncapi.yml\\\").exists()\\n\",\n    \"\\n\",\n    \"    finally:\\n\",\n    \"        shutil.rmtree(asyncapi_path)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ffb0e6e7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _generate_async_docs(\\n\",\n    \"    *,\\n\",\n    \"    spec_path: Path,\\n\",\n    \"    docs_path: Path,\\n\",\n    \") -> None:\\n\",\n    \"    _check_npm_with_local()\\n\",\n    \"    cmd = [\\n\",\n    \"        \\\"npx\\\",\\n\",\n    \"        \\\"-y\\\",\\n\",\n    \"        \\\"-p\\\",\\n\",\n    \"        \\\"@asyncapi/generator\\\",\\n\",\n    \"        \\\"ag\\\",\\n\",\n    \"        f\\\"{spec_path}\\\",\\n\",\n    \"        \\\"@asyncapi/html-template\\\",\\n\",\n    \"        \\\"-o\\\",\\n\",\n    \"        f\\\"{docs_path}\\\",\\n\",\n    \"        \\\"--force-write\\\",\\n\",\n    \"    ]\\n\",\n    \"    # nosemgrep: python.lang.security.audit.subprocess-shell-true.subprocess-shell-true\\n\",\n    \"    p = subprocess.run(  # nosec: B602, B603 subprocess call - check for execution of untrusted input.\\n\",\n    \"        cmd,\\n\",\n    \"        stderr=subprocess.STDOUT,\\n\",\n    \"        stdout=subprocess.PIPE,\\n\",\n    \"        shell=True if platform.system() == \\\"Windows\\\" else False,\\n\",\n    \"    )\\n\",\n    \"    if p.returncode == 0:\\n\",\n    \"        logger.info(f\\\"Async docs generated at '{docs_path}'\\\")\\n\",\n    \"        logger.info(f\\\"Output of '$ {' '.join(cmd)}'{p.stdout.decode()}\\\")\\n\",\n    \"    else:\\n\",\n    \"        logger.error(f\\\"Generation of async docs failed!\\\")\\n\",\n    \"        logger.info(f\\\"Output of '$ {' '.join(cmd)}'{p.stdout.decode()}\\\")\\n\",\n    \"        raise ValueError(\\n\",\n    \"            f\\\"Generation of async docs failed, used '$ {' 
'.join(cmd)}'{p.stdout.decode()}\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"07f42332\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\",\n      \"[INFO] __main__: Old async specifications at '/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml' does not exist.\\n\",\n      \"[INFO] __main__: New async specifications generated at: '/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml'\\n\",\n      \"[INFO] __main__: Async docs generated at '/tmp/003_AsyncAPI/asyncapi/docs'\\n\",\n      \"[INFO] __main__: Output of '$ npx -y -p @asyncapi/generator ag /tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml @asyncapi/html-template -o /tmp/003_AsyncAPI/asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! 
✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/tmp/003_AsyncAPI/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with tempfile.TemporaryDirectory() as d:\\n\",\n    \"    try:\\n\",\n    \"        asyncapi_path = Path(d).parent / \\\"003_AsyncAPI\\\" / \\\"asyncapi\\\"\\n\",\n    \"        if asyncapi_path.exists():\\n\",\n    \"            shutil.rmtree(asyncapi_path)\\n\",\n    \"        spec_path = Path(asyncapi_path) / \\\"spec\\\" / \\\"asyncapi.yml\\\"\\n\",\n    \"        docs_path = Path(asyncapi_path) / \\\"docs\\\"\\n\",\n    \"\\n\",\n    \"        is_spec_built = _generate_async_spec(\\n\",\n    \"            consumers=consumers,\\n\",\n    \"            producers=producers,\\n\",\n    \"            kafka_brokers=kafka_brokers,\\n\",\n    \"            kafka_service_info=kafka_service_info,\\n\",\n    \"            spec_path=spec_path,\\n\",\n    \"            force_rebuild=False,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        _generate_async_docs(\\n\",\n    \"            spec_path=spec_path,\\n\",\n    \"            docs_path=docs_path,\\n\",\n    \"        )\\n\",\n    \"        assert docs_path.exists()\\n\",\n    \"    finally:\\n\",\n    \"        shutil.rmtree(asyncapi_path)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"238947d5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# |export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def export_async_spec(\\n\",\n    \"    *,\\n\",\n    \"    consumers: Dict[str, ConsumeCallable],\\n\",\n    \"    producers: Dict[str, ProduceCallable],\\n\",\n    \"    kafka_brokers: KafkaBrokers,\\n\",\n    \"    kafka_service_info: KafkaServiceInfo,\\n\",\n    \"    asyncapi_path: Union[Path, str],\\n\",\n    \"    force_rebuild: bool = True,\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"Exports 
the AsyncAPI specification and documentation to the given path.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        consumers: Dictionary of consumer functions, where the keys are the channel names and the values are the consumer functions.\\n\",\n    \"        producers: Dictionary of producer functions, where the keys are the channel names and the values are the producer functions.\\n\",\n    \"        kafka_brokers: KafkaBrokers object representing the Kafka brokers configuration.\\n\",\n    \"        kafka_service_info: KafkaServiceInfo object representing the Kafka service info configuration.\\n\",\n    \"        asyncapi_path: Path or string representing the base path where the specification and documentation will be exported.\\n\",\n    \"        force_rebuild: Boolean indicating whether to force a rebuild of the specification file even if it already exists.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    # generate spec file\\n\",\n    \"    spec_path = Path(asyncapi_path) / \\\"spec\\\" / \\\"asyncapi.yml\\\"\\n\",\n    \"    is_spec_built = _generate_async_spec(\\n\",\n    \"        consumers=consumers,\\n\",\n    \"        producers=producers,\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        kafka_service_info=kafka_service_info,\\n\",\n    \"        spec_path=spec_path,\\n\",\n    \"        force_rebuild=force_rebuild,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    # generate docs folder\\n\",\n    \"    docs_path = Path(asyncapi_path) / \\\"docs\\\"\\n\",\n    \"\\n\",\n    \"    if not is_spec_built and docs_path.exists():\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"Skipping generating async documentation in '{docs_path.resolve()}'\\\"\\n\",\n    \"        )\\n\",\n    \"        return\\n\",\n    \"\\n\",\n    \"    _generate_async_docs(\\n\",\n    \"        spec_path=spec_path,\\n\",\n    \"        docs_path=docs_path,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"2403e140\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'msg_url': {'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}, 'email': 'agent-007@sis.gov.uk'}\\n\",\n      \"{'info': {'mobile': '+385987654321', 'name': 'James Bond'}, 'url': 'https://sis.gov.uk/agents/007'}\\n\",\n      \"[INFO] __main__: Old async specifications at '/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml' does not exist.\\n\",\n      \"[INFO] __main__: New async specifications generated at: '/tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml'\\n\",\n      \"[INFO] __main__: Async docs generated at '/tmp/003_AsyncAPI/asyncapi/docs'\\n\",\n      \"[INFO] __main__: Output of '$ npx -y -p @asyncapi/generator ag /tmp/003_AsyncAPI/asyncapi/spec/asyncapi.yml @asyncapi/html-template -o /tmp/003_AsyncAPI/asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! 
✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/tmp/003_AsyncAPI/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with tempfile.TemporaryDirectory() as d:\\n\",\n    \"    try:\\n\",\n    \"        asyncapi_path = Path(d).parent / \\\"003_AsyncAPI\\\" / \\\"asyncapi\\\"\\n\",\n    \"        if asyncapi_path.exists():\\n\",\n    \"            shutil.rmtree(asyncapi_path)\\n\",\n    \"\\n\",\n    \"        export_async_spec(\\n\",\n    \"            consumers=consumers,\\n\",\n    \"            producers=producers,\\n\",\n    \"            kafka_brokers=kafka_brokers,\\n\",\n    \"            kafka_service_info=kafka_service_info,\\n\",\n    \"            asyncapi_path=asyncapi_path,\\n\",\n    \"            force_rebuild=False,\\n\",\n    \"        )\\n\",\n    \"        #         !ls -al {asyncapi_path}\\n\",\n    \"        assert (Path(asyncapi_path) / \\\"spec\\\" / \\\"asyncapi.yml\\\").exists()\\n\",\n    \"        assert (Path(asyncapi_path) / \\\"docs\\\" / \\\"index.html\\\").exists()\\n\",\n    \"    finally:\\n\",\n    \"        shutil.rmtree(asyncapi_path)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"75939f55\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/015_FastKafka.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ff734a78\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _application.app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a5096aab\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import functools\\n\",\n    \"import inspect\\n\",\n    \"import json\\n\",\n    \"import types\\n\",\n    \"from asyncio import iscoroutinefunction  # do not use the version from inspect\\n\",\n    \"from collections import namedtuple\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"from copy import deepcopy\\n\",\n    \"from datetime import datetime, timedelta\\n\",\n    \"from functools import wraps\\n\",\n    \"from inspect import signature\\n\",\n    \"from pathlib import Path\\n\",\n    \"from typing import *\\n\",\n    \"from unittest.mock import AsyncMock, MagicMock\\n\",\n    \"\\n\",\n    \"import anyio\\n\",\n    \"from pydantic import BaseModel\\n\",\n    \"\\n\",\n    \"from fastkafka._components.aiokafka_consumer_loop import (\\n\",\n    \"    aiokafka_consumer_loop,\\n\",\n    \"    sanitize_kafka_config,\\n\",\n    \")\\n\",\n    \"from fastkafka._components.asyncapi import (\\n\",\n    \"    ConsumeCallable,\\n\",\n    \"    ContactInfo,\\n\",\n    \"    KafkaBroker,\\n\",\n    \"    KafkaBrokers,\\n\",\n    \"    KafkaServiceInfo,\\n\",\n    \"    export_async_spec,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"import fastkafka._aiokafka_imports\\n\",\n    \"from fastkafka._aiokafka_imports import AIOKafkaConsumer, AIOKafkaProducer\\n\",\n    \"from fastkafka._components.benchmarking import _benchmark\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import delegates, export, filter_using_signature, patch\\n\",\n    
\"from fastkafka._components.producer_decorator import ProduceCallable, producer_decorator\\n\",\n    \"from fastkafka._components.task_streaming import StreamExecutor\\n\",\n    \"from fastkafka._components.helpers import remove_suffix\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"85f14f6a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"if TYPE_CHECKING:\\n\",\n    \"    from fastapi import FastAPI\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fdedeee5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import __main__\\n\",\n    \"\\n\",\n    \"import os\\n\",\n    \"import shutil\\n\",\n    \"import unittest.mock\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"import pytest\\n\",\n    \"import requests\\n\",\n    \"import uvicorn\\n\",\n    \"import yaml\\n\",\n    \"from fastapi import FastAPI\\n\",\n    \"from pydantic import EmailStr, Field, HttpUrl\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import true_after\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka._server import run_in_process\\n\",\n    \"from fastkafka.encoder import avro_decoder, avro_encoder, json_decoder, json_encoder\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, Tester, mock_AIOKafkaProducer_send\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b33a28e5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9cf16b2d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      
\"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d9177ac1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9e0f175c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"211534a4\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Constructor utilities\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f978e721\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer, but=[\\\"bootstrap_servers\\\"])\\n\",\n    \"@delegates(fastkafka._aiokafka_imports.AIOKafkaProducer, but=[\\\"bootstrap_servers\\\"], keep=True)\\n\",\n    \"def _get_kafka_config(\\n\",\n    \"    bootstrap_servers_id: str = \\\"localhost\\\",\\n\",\n    \"    **kwargs: Any,\\n\",\n    \") -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"Get kafka config\\\"\\\"\\\"\\n\",\n    \"    allowed_keys = set(signature(_get_kafka_config).parameters.keys())\\n\",\n    \"    if not set(kwargs.keys()) <= allowed_keys:\\n\",\n    \"        unallowed_keys = \\\", \\\".join(\\n\",\n    \"            sorted([f\\\"'{x}'\\\" for x in set(kwargs.keys()).difference(allowed_keys)])\\n\",\n    \"        )\\n\",\n    \"        raise ValueError(f\\\"Unallowed key arguments passed: {unallowed_keys}\\\")\\n\",\n    \"    retval = kwargs.copy()\\n\",\n    \"\\n\",\n   
 \"    # todo: check this values\\n\",\n    \"    config_defaults = {\\n\",\n    \"        \\\"bootstrap_servers_id\\\": bootstrap_servers_id,\\n\",\n    \"        \\\"auto_offset_reset\\\": \\\"earliest\\\",\\n\",\n    \"        \\\"max_poll_records\\\": 100,\\n\",\n    \"    }\\n\",\n    \"    for key, value in config_defaults.items():\\n\",\n    \"        if key not in retval:\\n\",\n    \"            retval[key] = value\\n\",\n    \"\\n\",\n    \"    return retval\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8cdf9e16\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert _get_kafka_config() == {\\n\",\n    \"    \\\"bootstrap_servers_id\\\": \\\"localhost\\\",\\n\",\n    \"    \\\"auto_offset_reset\\\": \\\"earliest\\\",\\n\",\n    \"    \\\"max_poll_records\\\": 100,\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"assert _get_kafka_config(max_poll_records=1_000) == {\\n\",\n    \"    \\\"bootstrap_servers_id\\\": \\\"localhost\\\",\\n\",\n    \"    \\\"auto_offset_reset\\\": \\\"earliest\\\",\\n\",\n    \"    \\\"max_poll_records\\\": 1_000,\\n\",\n    \"}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"34b17b9f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    _get_kafka_config(random_key=1_000, whatever=\\\"whocares\\\")\\n\",\n    \"assert e.value.args == (\\\"Unallowed key arguments passed: 'random_key', 'whatever'\\\",)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b3477cb0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_kafka_brokers(\\n\",\n    \"    kafka_brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = None\\n\",\n    \") -> KafkaBrokers:\\n\",\n    \"    \\\"\\\"\\\"Get Kafka brokers\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    
\"        kafka_brokers: Kafka brokers\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if kafka_brokers is None:\\n\",\n    \"        retval: KafkaBrokers = KafkaBrokers(\\n\",\n    \"            brokers={\\n\",\n    \"                \\\"localhost\\\": KafkaBroker(  # type: ignore\\n\",\n    \"                    url=\\\"https://localhost\\\",\\n\",\n    \"                    description=\\\"Local (dev) Kafka broker\\\",\\n\",\n    \"                    port=\\\"9092\\\",\\n\",\n    \"                    grouping=\\\"localhost\\\",\\n\",\n    \"                )\\n\",\n    \"            }\\n\",\n    \"        )\\n\",\n    \"    else:\\n\",\n    \"        if isinstance(kafka_brokers, KafkaBrokers):\\n\",\n    \"            return kafka_brokers\\n\",\n    \"\\n\",\n    \"        retval = KafkaBrokers(\\n\",\n    \"            brokers={\\n\",\n    \"                k: (\\n\",\n    \"                    [\\n\",\n    \"                        KafkaBroker.model_validate_json(\\n\",\n    \"                            unwrapped_v.model_dump_json()\\n\",\n    \"                            if hasattr(unwrapped_v, \\\"model_dump_json\\\")\\n\",\n    \"                            else json.dumps(unwrapped_v)\\n\",\n    \"                        )\\n\",\n    \"                        for unwrapped_v in v\\n\",\n    \"                    ]\\n\",\n    \"                    if isinstance(v, list)\\n\",\n    \"                    else KafkaBroker.model_validate_json(\\n\",\n    \"                        v.model_dump_json() if hasattr(v, \\\"model_dump_json\\\") else json.dumps(v)\\n\",\n    \"                    )\\n\",\n    \"                )\\n\",\n    \"                for k, v in kafka_brokers.items()\\n\",\n    \"            }\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    return retval\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4449bac2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    
\"assert (\\n\",\n    \"    _get_kafka_brokers(None).model_dump_json()\\n\",\n    \"    == '{\\\"brokers\\\": {\\\"localhost\\\": {\\\"url\\\": \\\"https://localhost\\\", \\\"description\\\": \\\"Local (dev) Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}}}'\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    _get_kafka_brokers(dict(localhost=dict(url=\\\"localhost\\\"))).model_dump_json()\\n\",\n    \"    == '{\\\"brokers\\\": {\\\"localhost\\\": {\\\"url\\\": \\\"localhost\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}}}'\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    _get_kafka_brokers(\\n\",\n    \"        dict(localhost=dict(url=\\\"localhost\\\"), staging=dict(url=\\\"staging.airt.ai\\\"))\\n\",\n    \"    ).model_dump_json()\\n\",\n    \"    == '{\\\"brokers\\\": {\\\"localhost\\\": {\\\"url\\\": \\\"localhost\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}, \\\"staging\\\": {\\\"url\\\": \\\"staging.airt.ai\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}}}'\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    _get_kafka_brokers(\\n\",\n    \"        dict(\\n\",\n    \"            localhost=[dict(url=\\\"localhost123\\\"), dict(url=\\\"localhost321\\\")],\\n\",\n    \"            staging=dict(url=\\\"staging.airt.ai\\\"),\\n\",\n    \"        )\\n\",\n    \"    ).model_dump_json()\\n\",\n    \"    == '{\\\"brokers\\\": {\\\"localhost-bootstrap-server-0\\\": {\\\"url\\\": \\\"localhost123\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}, 
\\\"localhost-bootstrap-server-1\\\": {\\\"url\\\": \\\"localhost321\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}, \\\"staging\\\": {\\\"url\\\": \\\"staging.airt.ai\\\", \\\"description\\\": \\\"Kafka broker\\\", \\\"protocol\\\": \\\"kafka\\\", \\\"variables\\\": {\\\"port\\\": {\\\"default\\\": \\\"9092\\\"}}}}}'\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b175647e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _get_broker_addr_list(\\n\",\n    \"    brokers: Union[List[KafkaBroker], KafkaBroker]\\n\",\n    \") -> Union[str, List[str]]:\\n\",\n    \"    if isinstance(brokers, list):\\n\",\n    \"        return [f\\\"{broker.url}:{broker.port}\\\" for broker in brokers]\\n\",\n    \"    else:\\n\",\n    \"        return f\\\"{brokers.url}:{brokers.port}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7cf5447e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"brokers_config = _get_kafka_brokers(\\n\",\n    \"    dict(\\n\",\n    \"        localhost=[dict(url=\\\"localhost123\\\"), dict(url=\\\"localhost321\\\")],\\n\",\n    \"        staging=dict(url=\\\"staging.airt.ai\\\"),\\n\",\n    \"    )\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"assert _get_broker_addr_list(brokers_config.brokers[\\\"localhost\\\"]) == ['localhost123:9092', 'localhost321:9092']\\n\",\n    \"assert _get_broker_addr_list(brokers_config.brokers[\\\"staging\\\"]) == 'staging.airt.ai:9092'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5f6c1f40\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_topic_name(\\n\",\n    \"    topic_callable: Union[ConsumeCallable, 
ProduceCallable], prefix: str = \\\"on_\\\"\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Get topic name\\n\",\n    \"    Args:\\n\",\n    \"        topic_callable: a function\\n\",\n    \"        prefix: prefix of the name of the function followed by the topic name\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The name of the topic\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    topic = topic_callable.__name__\\n\",\n    \"    if not topic.startswith(prefix) or len(topic) <= len(prefix):\\n\",\n    \"        raise ValueError(f\\\"Function name '{topic}' must start with {prefix}\\\")\\n\",\n    \"    topic = topic[len(prefix) :]\\n\",\n    \"\\n\",\n    \"    return topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ec9b3689\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def on_topic_name_1():\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert _get_topic_name(on_topic_name_1) == \\\"topic_name_1\\\"\\n\",\n    \"\\n\",\n    \"assert _get_topic_name(on_topic_name_1, prefix=\\\"on_topic_\\\") == \\\"name_1\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ab9e9f96\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_contact_info(\\n\",\n    \"    name: str = \\\"Author\\\",\\n\",\n    \"    url: str = \\\"https://www.google.com\\\",\\n\",\n    \"    email: str = \\\"noreply@gmail.com\\\",\\n\",\n    \") -> ContactInfo:\\n\",\n    \"    return ContactInfo(name=name, url=url, email=email)  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c7e311f0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert _get_contact_info() == ContactInfo(\\n\",\n    \"    name=\\\"Author\\\",\\n\",\n    \"    url=HttpUrl(url=\\\"https://www.google.com\\\"),\\n\",\n    \"    
email=\\\"noreply@gmail.com\\\",\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"57eeeb84\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | exporti\\n\",\n    \"\\n\",\n    \"I = TypeVar(\\\"I\\\", bound=BaseModel)\\n\",\n    \"O = TypeVar(\\\"O\\\", BaseModel, Awaitable[BaseModel])\\n\",\n    \"\\n\",\n    \"F = TypeVar(\\\"F\\\", bound=Callable)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"49c37353\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka\\\")\\n\",\n    \"class FastKafka:\\n\",\n    \"    @delegates(_get_kafka_config)\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        *,\\n\",\n    \"        title: Optional[str] = None,\\n\",\n    \"        description: Optional[str] = None,\\n\",\n    \"        version: Optional[str] = None,\\n\",\n    \"        contact: Optional[Dict[str, str]] = None,\\n\",\n    \"        kafka_brokers: Optional[Dict[str, Any]] = None,\\n\",\n    \"        root_path: Optional[Union[Path, str]] = None,\\n\",\n    \"        lifespan: Optional[Callable[[\\\"FastKafka\\\"], AsyncContextManager[None]]] = None,\\n\",\n    \"        **kwargs: Any,\\n\",\n    \"    ):\\n\",\n    \"        \\\"\\\"\\\"Creates FastKafka application\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            title: optional title for the documentation. If None,\\n\",\n    \"                the title will be set to empty string\\n\",\n    \"            description: optional description for the documentation. If\\n\",\n    \"                None, the description will be set to empty string\\n\",\n    \"            version: optional version for the documentation. 
If None,\\n\",\n    \"                the version will be set to empty string\\n\",\n    \"            contact: optional contact for the documentation. If None, the\\n\",\n    \"                contact will be set to placeholder values:\\n\",\n    \"                name='Author' url=HttpUrl('https://www.google.com', ) email='noreply@gmail.com'\\n\",\n    \"            kafka_brokers: dictionary describing kafka brokers used for setting\\n\",\n    \"                the bootstrap server when running the applicationa and for\\n\",\n    \"                generating documentation. Defaults to\\n\",\n    \"                    {\\n\",\n    \"                        \\\"localhost\\\": {\\n\",\n    \"                            \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"                            \\\"description\\\": \\\"local kafka broker\\\",\\n\",\n    \"                            \\\"port\\\": \\\"9092\\\",\\n\",\n    \"                        }\\n\",\n    \"                    }\\n\",\n    \"            root_path: path to where documentation will be created\\n\",\n    \"            lifespan: asynccontextmanager that is used for setting lifespan hooks.\\n\",\n    \"                __aenter__ is called before app start and __aexit__ after app stop.\\n\",\n    \"                The lifespan is called whe application is started as async context\\n\",\n    \"                manager, e.g.:`async with kafka_app...`\\n\",\n    \"\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        # this is needed for documentation generation\\n\",\n    \"        self._title = title if title is not None else \\\"\\\"\\n\",\n    \"        self._description = description if description is not None else \\\"\\\"\\n\",\n    \"        self._version = version if version is not None else \\\"\\\"\\n\",\n    \"        if contact is not None:\\n\",\n    \"            self._contact_info = _get_contact_info(**contact)\\n\",\n    \"        else:\\n\",\n    \"            
self._contact_info = _get_contact_info()\\n\",\n    \"\\n\",\n    \"        self._kafka_service_info = KafkaServiceInfo(\\n\",\n    \"            title=self._title,\\n\",\n    \"            version=self._version,\\n\",\n    \"            description=self._description,\\n\",\n    \"            contact=self._contact_info,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        if kafka_brokers is None:\\n\",\n    \"            kafka_brokers = {\\n\",\n    \"                \\\"localhost\\\": {\\n\",\n    \"                    \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"                    \\\"description\\\": \\\"local kafka broker\\\",\\n\",\n    \"                    \\\"port\\\": \\\"9092\\\",\\n\",\n    \"                }\\n\",\n    \"            }\\n\",\n    \"\\n\",\n    \"        self._kafka_brokers = _get_kafka_brokers(kafka_brokers)\\n\",\n    \"\\n\",\n    \"        self._override_brokers: List[KafkaBrokers] = []\\n\",\n    \"\\n\",\n    \"        self._root_path = Path(\\\".\\\") if root_path is None else Path(root_path)\\n\",\n    \"        self._root_path.mkdir(exist_ok=True, parents=True)\\n\",\n    \"\\n\",\n    \"        self._asyncapi_path = self._root_path / \\\"asyncapi\\\"\\n\",\n    \"\\n\",\n    \"        # this is used as default parameters for creating AIOProducer and AIOConsumer objects\\n\",\n    \"        self._kafka_config = _get_kafka_config(**kwargs)\\n\",\n    \"\\n\",\n    \"        #\\n\",\n    \"        self._consumers_store: Dict[\\n\",\n    \"            str,\\n\",\n    \"            Tuple[\\n\",\n    \"                ConsumeCallable,\\n\",\n    \"                Callable[[bytes, Type[BaseModel]], Any],\\n\",\n    \"                Union[str, StreamExecutor, None],\\n\",\n    \"                Optional[KafkaBrokers],\\n\",\n    \"                Dict[str, Any],\\n\",\n    \"            ],\\n\",\n    \"        ] = {}\\n\",\n    \"\\n\",\n    \"        self._producers_store: Dict[  # type: ignore\\n\",\n    \"            
str,\\n\",\n    \"            Tuple[\\n\",\n    \"                ProduceCallable,\\n\",\n    \"                fastkafka._aiokafka_imports.AIOKafkaProducer,\\n\",\n    \"                Optional[KafkaBrokers],\\n\",\n    \"                Dict[str, Any],\\n\",\n    \"            ],\\n\",\n    \"        ] = {}\\n\",\n    \"\\n\",\n    \"        self._producers_list: List[fastkafka._aiokafka_imports.AIOKafkaProducer] = []  # type: ignore\\n\",\n    \"\\n\",\n    \"        self.benchmark_results: Dict[str, Dict[str, Any]] = {}\\n\",\n    \"\\n\",\n    \"        # background tasks\\n\",\n    \"        self._scheduled_bg_tasks: List[Callable[..., Coroutine[Any, Any, Any]]] = []\\n\",\n    \"        self._bg_task_group_generator: Optional[anyio.abc.TaskGroup] = None\\n\",\n    \"        self._bg_tasks_group: Optional[anyio.abc.TaskGroup] = None\\n\",\n    \"\\n\",\n    \"        # todo: use this for errrors\\n\",\n    \"        self._on_error_topic: Optional[str] = None\\n\",\n    \"\\n\",\n    \"        self.lifespan = lifespan\\n\",\n    \"        self.lifespan_ctx: Optional[AsyncContextManager[None]] = None\\n\",\n    \"\\n\",\n    \"        self._is_started: bool = False\\n\",\n    \"        self._is_shutting_down: bool = False\\n\",\n    \"        self._kafka_consumer_tasks: List[asyncio.Task[Any]] = []\\n\",\n    \"        self._kafka_producer_tasks: List[asyncio.Task[Any]] = []\\n\",\n    \"        self._running_bg_tasks: List[asyncio.Task[Any]] = []\\n\",\n    \"        self.run = False\\n\",\n    \"\\n\",\n    \"        # testing functions\\n\",\n    \"        self.AppMocks = None\\n\",\n    \"        self.mocks = None\\n\",\n    \"        self.awaited_mocks = None\\n\",\n    \"\\n\",\n    \"    @property\\n\",\n    \"    def is_started(self) -> bool:\\n\",\n    \"        \\\"\\\"\\\"Property indicating whether the FastKafka object is started.\\n\",\n    \"\\n\",\n    \"        The is_started property indicates if the FastKafka object is currently \\n\",\n    
\"        in a started state. This implies that all background tasks, producers, \\n\",\n    \"        and consumers have been initiated, and the object is successfully connected \\n\",\n    \"        to the Kafka broker. \\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            bool: True if the object is started, False otherwise.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return self._is_started\\n\",\n    \"\\n\",\n    \"    def set_kafka_broker(self, kafka_broker_name: str) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Sets the Kafka broker to start FastKafka with\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            kafka_broker_name: The name of the Kafka broker to start FastKafka\\n\",\n    \"\\n\",\n    \"        Raises:\\n\",\n    \"            ValueError: If the provided kafka_broker_name is not found in dictionary of kafka_brokers\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        if kafka_broker_name not in self._kafka_brokers.brokers:\\n\",\n    \"            raise ValueError(\\n\",\n    \"                f\\\"Given kafka_broker_name '{kafka_broker_name}' is not found in kafka_brokers, available options are {self._kafka_brokers.brokers.keys()}\\\"\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"        self._kafka_config[\\\"bootstrap_servers_id\\\"] = kafka_broker_name\\n\",\n    \"\\n\",\n    \"    async def __aenter__(self) -> \\\"FastKafka\\\":\\n\",\n    \"        if self.lifespan is not None:\\n\",\n    \"            self.lifespan_ctx = self.lifespan(self)\\n\",\n    \"            await self.lifespan_ctx.__aenter__()\\n\",\n    \"        await self._start()\\n\",\n    \"        return self\\n\",\n    \"\\n\",\n    \"    async def __aexit__(\\n\",\n    \"        self,\\n\",\n    \"        exc_type: Optional[Type[BaseException]],\\n\",\n    \"        exc: Optional[BaseException],\\n\",\n    \"        tb: Optional[types.TracebackType],\\n\",\n    \"    ) -> None:\\n\",\n   
 \"        await self._stop()\\n\",\n    \"        if self.lifespan_ctx is not None:\\n\",\n    \"            await self.lifespan_ctx.__aexit__(exc_type, exc, tb)\\n\",\n    \"\\n\",\n    \"    async def _start(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _stop(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def consumes(\\n\",\n    \"        self,\\n\",\n    \"        topic: Optional[str] = None,\\n\",\n    \"        decoder: str = \\\"json\\\",\\n\",\n    \"        *,\\n\",\n    \"        prefix: str = \\\"on_\\\",\\n\",\n    \"        brokers: Optional[KafkaBrokers] = None,\\n\",\n    \"        description: Optional[str] = None,\\n\",\n    \"        **kwargs: Dict[str, Any],\\n\",\n    \"    ) -> ConsumeCallable:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def produces(\\n\",\n    \"        self,\\n\",\n    \"        topic: Optional[str] = None,\\n\",\n    \"        encoder: str = \\\"json\\\",\\n\",\n    \"        *,\\n\",\n    \"        prefix: str = \\\"to_\\\",\\n\",\n    \"        brokers: Optional[KafkaBrokers] = None,\\n\",\n    \"        description: Optional[str] = None,\\n\",\n    \"        **kwargs: Dict[str, Any],\\n\",\n    \"    ) -> ProduceCallable:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def benchmark(\\n\",\n    \"        self,\\n\",\n    \"        interval: Union[int, timedelta] = 1,\\n\",\n    \"        *,\\n\",\n    \"        sliding_window_size: Optional[int] = None,\\n\",\n    \"    ) -> Callable[[F], F]:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def run_in_background(\\n\",\n    \"        self,\\n\",\n    \"    ) -> Callable[[], Any]:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def _populate_consumers(\\n\",\n    \"        self,\\n\",\n    \"        is_shutting_down_f: Callable[[], bool],\\n\",\n    
\"    ) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def get_topics(self) -> Iterable[str]:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _populate_producers(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _populate_bg_tasks(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def create_docs(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    def create_mocks(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _shutdown_consumers(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _shutdown_producers(self) -> None:\\n\",\n    \"        raise NotImplementedError\\n\",\n    \"\\n\",\n    \"    async def _shutdown_bg_tasks(self) -> None:\\n\",\n    \"        raise NotImplementedError\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c25f82b3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert FastKafka.__module__ == \\\"fastkafka\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"894af799\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"kafka_app = FastKafka(\\n\",\n    \"    kafka_brokers=dict(\\n\",\n    \"        localhost=[dict(url=\\\"localhost\\\", port=\\\"9092\\\"), dict(url=\\\"localhost\\\", port=\\\"9093\\\")]\\n\",\n    \"    )\\n\",\n    \")\\n\",\n    \"assert kafka_app._kafka_brokers == KafkaBrokers(\\n\",\n    \"    brokers={\\n\",\n    \"        \\\"localhost\\\": [\\n\",\n    \"            KafkaBroker(\\n\",\n    \"                url=\\\"localhost\\\",\\n\",\n    \"                description=\\\"Kafka broker\\\",\\n\",\n    \"                port=\\\"9092\\\",\\n\",\n    \"                
protocol=\\\"kafka\\\",\\n\",\n    \"                security=None,\\n\",\n    \"            ),\\n\",\n    \"            KafkaBroker(\\n\",\n    \"                url=\\\"localhost\\\",\\n\",\n    \"                description=\\\"Kafka broker\\\",\\n\",\n    \"                port=\\\"9093\\\",\\n\",\n    \"                protocol=\\\"kafka\\\",\\n\",\n    \"                security=None,\\n\",\n    \"            ),\\n\",\n    \"        ]\\n\",\n    \"    }\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"997906d0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"kafka_app = FastKafka()\\n\",\n    \"assert kafka_app._kafka_brokers == KafkaBrokers(\\n\",\n    \"    brokers={\\n\",\n    \"        \\\"localhost\\\": KafkaBroker(\\n\",\n    \"            url=\\\"localhost\\\",\\n\",\n    \"            description=\\\"local kafka broker\\\",\\n\",\n    \"            port=\\\"9092\\\",\\n\",\n    \"            protocol=\\\"kafka\\\",\\n\",\n    \"            security=None,\\n\",\n    \"        )\\n\",\n    \"    }\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3cfbe17d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def create_testing_app(\\n\",\n    \"    *, root_path: str = \\\"/tmp/000_FastKafka\\\", bootstrap_servers: Optional[str] = None\\n\",\n    \"):\\n\",\n    \"    if Path(root_path).exists():\\n\",\n    \"        shutil.rmtree(root_path)\\n\",\n    \"\\n\",\n    \"    host, port = None, None\\n\",\n    \"    if bootstrap_servers is not None:\\n\",\n    \"        host, port = bootstrap_servers.split(\\\":\\\")\\n\",\n    \"\\n\",\n    \"    kafka_app = FastKafka(\\n\",\n    \"        kafka_brokers={\\n\",\n    \"            \\\"localhost\\\": {\\n\",\n    \"                \\\"url\\\": host if host is not None else \\\"localhost\\\",\\n\",\n    \"                \\\"name\\\": 
\\\"development\\\",\\n\",\n    \"                \\\"description\\\": \\\"Local (dev) Kafka broker\\\",\\n\",\n    \"                \\\"port\\\": port if port is not None else \\\"9092\\\",\\n\",\n    \"            }\\n\",\n    \"        },\\n\",\n    \"        root_path=root_path,\\n\",\n    \"    )\\n\",\n    \"    kafka_app.set_kafka_broker(kafka_broker_name=\\\"localhost\\\")\\n\",\n    \"\\n\",\n    \"    return kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"66237424\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<fastkafka.FastKafka>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"app = create_testing_app()\\n\",\n    \"assert Path(\\\"/tmp/000_FastKafka\\\").exists()\\n\",\n    \"app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5bebbf80\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_decoder_fn(decoder: str) -> Callable[[bytes, Type[BaseModel]], Any]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Imports and returns decoder function based on input\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if decoder == \\\"json\\\":\\n\",\n    \"        from fastkafka._components.encoder.json import json_decoder\\n\",\n    \"\\n\",\n    \"        return json_decoder\\n\",\n    \"    elif decoder == \\\"avro\\\":\\n\",\n    \"        try:\\n\",\n    \"            from fastkafka._components.encoder.avro import avro_decoder\\n\",\n    \"        except ModuleNotFoundError:\\n\",\n    \"            raise ModuleNotFoundError(\\n\",\n    \"                \\\"Unable to import avro packages. 
Please install FastKafka using the command 'fastkafka[avro]'\\\"\\n\",\n    \"            )\\n\",\n    \"        return avro_decoder\\n\",\n    \"    else:\\n\",\n    \"        raise ValueError(f\\\"Unknown decoder - {decoder}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6715c512\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"actual = _get_decoder_fn(\\\"json\\\")\\n\",\n    \"assert actual == json_decoder\\n\",\n    \"\\n\",\n    \"actual = _get_decoder_fn(\\\"avro\\\")\\n\",\n    \"assert actual == avro_decoder\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a38da3a6\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _prepare_and_check_brokers(\\n\",\n    \"    app: FastKafka, kafka_brokers: Optional[Union[Dict[str, Any], KafkaBrokers]]\\n\",\n    \") -> Optional[KafkaBrokers]:\\n\",\n    \"    if kafka_brokers is not None:\\n\",\n    \"        prepared_brokers = _get_kafka_brokers(kafka_brokers)\\n\",\n    \"        if prepared_brokers.brokers.keys() != app._kafka_brokers.brokers.keys():\\n\",\n    \"            raise ValueError(\\n\",\n    \"                f\\\"To override application default brokers, you must define all of the broker options. 
Default defined: {set(app._kafka_brokers.brokers.keys())}, override defined: {set(prepared_brokers.brokers.keys())}\\\"\\n\",\n    \"            )\\n\",\n    \"        return prepared_brokers\\n\",\n    \"    return None\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c4b8b1ab\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _resolve_key(key: str, dictionary: Dict[str, Any]) -> str:\\n\",\n    \"    i = 0\\n\",\n    \"    resolved_key = f\\\"{key}_{i}\\\"\\n\",\n    \"    while resolved_key in dictionary:\\n\",\n    \"        i += 1\\n\",\n    \"        resolved_key = f\\\"{key}_{i}\\\"\\n\",\n    \"    return resolved_key\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9cddd5c3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(fastkafka._aiokafka_imports.AIOKafkaConsumer)\\n\",\n    \"def consumes(\\n\",\n    \"    self: FastKafka,\\n\",\n    \"    topic: Optional[str] = None,\\n\",\n    \"    decoder: Union[str, Callable[[bytes, Type[BaseModel]], Any]] = \\\"json\\\",\\n\",\n    \"    *,\\n\",\n    \"    executor: Union[str, StreamExecutor, None] = None,\\n\",\n    \"    brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = None,\\n\",\n    \"    prefix: str = \\\"on_\\\",\\n\",\n    \"    description: Optional[str] = None,\\n\",\n    \"    **kwargs: Dict[str, Any],\\n\",\n    \") -> Callable[[ConsumeCallable], ConsumeCallable]:\\n\",\n    \"    \\\"\\\"\\\"Decorator registering the callback called when a message is received in a topic.\\n\",\n    \"\\n\",\n    \"    This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: Kafka topic that the consumer will subscribe to and 
execute the\\n\",\n    \"            decorated function when it receives a message from the topic,\\n\",\n    \"            default: None. If the topic is not specified, topic name will be\\n\",\n    \"            inferred from the decorated function name by stripping the defined prefix\\n\",\n    \"        decoder: Decoder to use to decode messages consumed from the topic,\\n\",\n    \"                default: json - By default, it uses json decoder to decode\\n\",\n    \"                bytes to json string and then it creates instance of pydantic\\n\",\n    \"                BaseModel. It also accepts custom decoder function.\\n\",\n    \"        executor: Type of executor to choose for consuming tasks. Avaliable options\\n\",\n    \"                are \\\"SequentialExecutor\\\" and \\\"DynamicTaskExecutor\\\". The default option is\\n\",\n    \"                \\\"SequentialExecutor\\\" which will execute the consuming tasks sequentially.\\n\",\n    \"                If the consuming tasks have high latency it is recommended to use\\n\",\n    \"                \\\"DynamicTaskExecutor\\\" which will wrap the consuming functions into tasks\\n\",\n    \"                and run them in on asyncio loop in background. This comes with a cost of\\n\",\n    \"                increased overhead so use it only in cases when your consume functions have\\n\",\n    \"                high latency such as database queries or some other type of networking.\\n\",\n    \"        prefix: Prefix stripped from the decorated function to define a topic name\\n\",\n    \"                if the topic argument is not passed, default: \\\"on_\\\". 
If the decorated\\n\",\n    \"                function name is not prefixed with the defined prefix and topic argument\\n\",\n    \"                is not passed, then this method will throw ValueError\\n\",\n    \"        brokers: Optional argument specifying multiple broker clusters for consuming\\n\",\n    \"                messages from different Kafka clusters in FastKafka.\\n\",\n    \"        description: Optional description of the consuming function async docs.\\n\",\n    \"                If not provided, consuming function __doc__ attr will be used.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A function returning the same function\\n\",\n    \"\\n\",\n    \"    Throws:\\n\",\n    \"        ValueError\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def _decorator(\\n\",\n    \"        on_topic: ConsumeCallable,\\n\",\n    \"        topic: Optional[str] = topic,\\n\",\n    \"        decoder: Union[str, Callable[[bytes, Type[BaseModel]], Any]] = decoder,\\n\",\n    \"        executor: Union[str, StreamExecutor, None] = executor,\\n\",\n    \"        brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = brokers,\\n\",\n    \"        description: Optional[str] = description,\\n\",\n    \"        kwargs: Dict[str, Any] = kwargs,\\n\",\n    \"    ) -> ConsumeCallable:\\n\",\n    \"        topic_resolved: str = (\\n\",\n    \"            _get_topic_name(topic_callable=on_topic, prefix=prefix)\\n\",\n    \"            if topic is None\\n\",\n    \"            else topic\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        decoder_fn = _get_decoder_fn(decoder) if isinstance(decoder, str) else decoder\\n\",\n    \"\\n\",\n    \"        prepared_broker = _prepare_and_check_brokers(self, brokers)\\n\",\n    \"        if prepared_broker is not None:\\n\",\n    \"            self._override_brokers.append(prepared_broker.brokers)  # type: ignore\\n\",\n    \"        else:\\n\",\n    \"            prepared_broker = 
self._kafka_brokers\\n\",\n    \"\\n\",\n    \"        if description is not None:\\n\",\n    \"            setattr(on_topic, \\\"description\\\", description)\\n\",\n    \"\\n\",\n    \"        self._consumers_store[_resolve_key(topic_resolved, self._consumers_store)] = (\\n\",\n    \"            on_topic,\\n\",\n    \"            decoder_fn,\\n\",\n    \"            executor,\\n\",\n    \"            prepared_broker,\\n\",\n    \"            kwargs,\\n\",\n    \"        )\\n\",\n    \"        setattr(self, on_topic.__name__, on_topic)\\n\",\n    \"        return on_topic\\n\",\n    \"\\n\",\n    \"    return _decorator\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5e527eb5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"app = create_testing_app()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Basic check\\n\",\n    \"@app.consumes()\\n\",\n    \"def on_my_topic_1(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app._consumers_store[\\\"my_topic_1_0\\\"] == (\\n\",\n    \"    on_my_topic_1,\\n\",\n    \"    json_decoder,\\n\",\n    \"    None,\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    {},\\n\",\n    \"), app._consumers_store\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"on_my_topic_1\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Check executor setting\\n\",\n    \"@app.consumes(executor=\\\"DynamicTaskExecutor\\\")\\n\",\n    \"def on_my_topic_12(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app._consumers_store[\\\"my_topic_12_0\\\"] == (\\n\",\n    \"    on_my_topic_12,\\n\",\n    \"    json_decoder,\\n\",\n    \"    \\\"DynamicTaskExecutor\\\",\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    {},\\n\",\n    \"), app._consumers_store[\\\"my_topic_12\\\"]\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"on_my_topic_12\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Check topic 
setting\\n\",\n    \"@app.consumes(topic=\\\"test_topic_1\\\")\\n\",\n    \"def some_func_name(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app._consumers_store[\\\"test_topic_1_0\\\"] == (\\n\",\n    \"    some_func_name,\\n\",\n    \"    json_decoder,\\n\",\n    \"    None,\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    {},\\n\",\n    \"), app._consumers_store\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Check prefix change\\n\",\n    \"@app.consumes(prefix=\\\"for_\\\")\\n\",\n    \"def for_test_topic_3(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app._consumers_store[\\\"test_topic_3_0\\\"] == (\\n\",\n    \"    for_test_topic_3,\\n\",\n    \"    json_decoder,\\n\",\n    \"    None,\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    {},\\n\",\n    \"), app._consumers_store\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"for_test_topic_3\\\")\\n\",\n    \"\\n\",\n    \"# Check passing of kwargs\\n\",\n    \"kwargs = {\\\"arg1\\\": \\\"val1\\\", \\\"arg2\\\": 2}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"test_topic\\\", **kwargs)\\n\",\n    \"def for_test_kwargs(msg: BaseModel):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app._consumers_store[\\\"test_topic_0\\\"] == (\\n\",\n    \"    for_test_kwargs,\\n\",\n    \"    json_decoder,\\n\",\n    \"    None,\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    kwargs,\\n\",\n    \"), app._consumers_store\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"for_test_kwargs\\\")\\n\",\n    \"\\n\",\n    \"# Check description setting\\n\",\n    \"@app.consumes(description = \\\"Some generic description\\\")\\n\",\n    \"def on_test_topic_description(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app._consumers_store[\\\"test_topic_description_0\\\"][0].description == \\\"Some generic description\\\"\"\n   ]\n  },\n  
{\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b2fb7fb1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# check broker overriding\\n\",\n    \"@app.consumes(\\n\",\n    \"    brokers=dict(\\n\",\n    \"        localhost=[\\n\",\n    \"            dict(url=\\\"localhost\\\", port=\\\"9092\\\"),\\n\",\n    \"        ]\\n\",\n    \"    ),\\n\",\n    \")\\n\",\n    \"def on_my_topic_12345(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"expected = (\\n\",\n    \"    on_my_topic_12345,\\n\",\n    \"    json_decoder,\\n\",\n    \"    None,\\n\",\n    \"    KafkaBrokers(\\n\",\n    \"        brokers={\\n\",\n    \"            \\\"localhost\\\": [\\n\",\n    \"                KafkaBroker(\\n\",\n    \"                    url=\\\"localhost\\\",\\n\",\n    \"                    description=\\\"Kafka broker\\\",\\n\",\n    \"                    port=\\\"9092\\\",\\n\",\n    \"                    protocol=\\\"kafka\\\",\\n\",\n    \"                    security=None,\\n\",\n    \"                ),\\n\",\n    \"            ]\\n\",\n    \"        }\\n\",\n    \"    ),\\n\",\n    \"    {},\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"actual = app._consumers_store[\\\"my_topic_12345_0\\\"]\\n\",\n    \"\\n\",\n    \"assert actual == expected, f\\\"{actual}!={expected}\\\"\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"on_my_topic_12345\\\")\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"\\n\",\n    \"    @app.consumes(\\n\",\n    \"        brokers=dict(\\n\",\n    \"            not_localhost=[\\n\",\n    \"                dict(url=\\\"localhost\\\", port=\\\"9092\\\"),\\n\",\n    \"                dict(url=\\\"localhost\\\", port=\\\"9093\\\"),\\n\",\n    \"            ]\\n\",\n    \"        ),\\n\",\n    \"    )\\n\",\n    \"    def 
on_my_topic_12345(msg: BaseModel) -> None:\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    e.value.args[0]\\n\",\n    \"    == \\\"To override application default brokers, you must define all of the broker options. Default defined: {'localhost'}, override defined: {'not_localhost'}\\\"\\n\",\n    \")\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c01256d4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_encoder_fn(encoder: str) -> Callable[[BaseModel], bytes]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Imports and returns encoder function based on input\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if encoder == \\\"json\\\":\\n\",\n    \"        from fastkafka._components.encoder.json import json_encoder\\n\",\n    \"\\n\",\n    \"        return json_encoder\\n\",\n    \"    elif encoder == \\\"avro\\\":\\n\",\n    \"        try:\\n\",\n    \"            from fastkafka._components.encoder.avro import avro_encoder\\n\",\n    \"        except ModuleNotFoundError:\\n\",\n    \"            raise ModuleNotFoundError(\\n\",\n    \"                \\\"Unable to import avro packages. 
Please install FastKafka using the command 'fastkafka[avro]'\\\"\\n\",\n    \"            )\\n\",\n    \"        return avro_encoder\\n\",\n    \"    else:\\n\",\n    \"        raise ValueError(f\\\"Unknown encoder - {encoder}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"804434e1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"actual = _get_encoder_fn(\\\"json\\\")\\n\",\n    \"assert actual == json_encoder\\n\",\n    \"\\n\",\n    \"actual = _get_encoder_fn(\\\"avro\\\")\\n\",\n    \"assert actual == avro_encoder\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9e269659\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"@delegates(fastkafka._aiokafka_imports.AIOKafkaProducer)\\n\",\n    \"def produces(\\n\",\n    \"    self: FastKafka,\\n\",\n    \"    topic: Optional[str] = None,\\n\",\n    \"    encoder: Union[str, Callable[[BaseModel], bytes]] = \\\"json\\\",\\n\",\n    \"    *,\\n\",\n    \"    prefix: str = \\\"to_\\\",\\n\",\n    \"    brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = None,\\n\",\n    \"    description: Optional[str] = None,\\n\",\n    \"    **kwargs: Dict[str, Any],\\n\",\n    \") -> Callable[[ProduceCallable], ProduceCallable]:\\n\",\n    \"    \\\"\\\"\\\"Decorator registering the callback called when delivery report for a produced message is received\\n\",\n    \"\\n\",\n    \"    This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: Kafka topic that the producer will send returned values from\\n\",\n    \"            the decorated function to, default: None- If the topic is not\\n\",\n    \"            specified, topic name will be inferred from the decorated function\\n\",\n    \"            name 
by stripping the defined prefix.\\n\",\n    \"        encoder: Encoder to use to encode messages before sending it to topic,\\n\",\n    \"                default: json - By default, it uses json encoder to convert\\n\",\n    \"                pydantic basemodel to json string and then encodes the string to bytes\\n\",\n    \"                using 'utf-8' encoding. It also accepts custom encoder function.\\n\",\n    \"        prefix: Prefix stripped from the decorated function to define a topic\\n\",\n    \"            name if the topic argument is not passed, default: \\\"to_\\\". If the\\n\",\n    \"            decorated function name is not prefixed with the defined prefix\\n\",\n    \"            and topic argument is not passed, then this method will throw ValueError\\n\",\n    \"        brokers: Optional argument specifying multiple broker clusters for consuming\\n\",\n    \"            messages from different Kafka clusters in FastKafka.\\n\",\n    \"        description: Optional description of the producing function async docs.\\n\",\n    \"                If not provided, producing function __doc__ attr will be used.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A function returning the same function\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: when needed\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def _decorator(\\n\",\n    \"        to_topic: ProduceCallable,\\n\",\n    \"        topic: Optional[str] = topic,\\n\",\n    \"        brokers: Optional[Union[Dict[str, Any], KafkaBrokers]] = brokers,\\n\",\n    \"        description: Optional[str] = description,\\n\",\n    \"        kwargs: Dict[str, Any] = kwargs,\\n\",\n    \"    ) -> ProduceCallable:\\n\",\n    \"        topic_resolved: str = (\\n\",\n    \"            _get_topic_name(topic_callable=to_topic, prefix=prefix)\\n\",\n    \"            if topic is None\\n\",\n    \"            else topic\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    
\"        topic_key = _resolve_key(topic_resolved, self._producers_store)\\n\",\n    \"\\n\",\n    \"        prepared_broker = _prepare_and_check_brokers(self, brokers)\\n\",\n    \"        if prepared_broker is not None:\\n\",\n    \"            self._override_brokers.append(prepared_broker.brokers)  # type: ignore\\n\",\n    \"        else:\\n\",\n    \"            prepared_broker = self._kafka_brokers\\n\",\n    \"\\n\",\n    \"        if description is not None:\\n\",\n    \"            setattr(to_topic, \\\"description\\\", description)\\n\",\n    \"\\n\",\n    \"        self._producers_store[topic_key] = (\\n\",\n    \"            to_topic,\\n\",\n    \"            None,\\n\",\n    \"            prepared_broker,\\n\",\n    \"            kwargs,\\n\",\n    \"        )\\n\",\n    \"        encoder_fn = _get_encoder_fn(encoder) if isinstance(encoder, str) else encoder\\n\",\n    \"        decorated = producer_decorator(\\n\",\n    \"            self._producers_store,\\n\",\n    \"            to_topic,\\n\",\n    \"            topic_key,\\n\",\n    \"            encoder_fn=encoder_fn,\\n\",\n    \"        )\\n\",\n    \"        setattr(self, to_topic.__name__, decorated)\\n\",\n    \"        return decorated\\n\",\n    \"\\n\",\n    \"    return _decorator\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"acf7cede\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"app = create_testing_app()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Basic check\\n\",\n    \"async def to_my_topic_1(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Must be done without sugar to keep the original function reference\\n\",\n    \"check_func = to_my_topic_1\\n\",\n    \"to_my_topic_1 = app.produces()(to_my_topic_1)\\n\",\n    \"\\n\",\n    \"assert app._producers_store[\\\"my_topic_1_0\\\"] == (\\n\",\n    \"    check_func,\\n\",\n    \"    None,\\n\",\n    \"    
app._kafka_brokers,\\n\",\n    \"    {},\\n\",\n    \"), f\\\"{app._producers_store}, {to_my_topic_1}\\\"\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"to_my_topic_1\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Check topic setting\\n\",\n    \"async def some_func_name(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"check_func = some_func_name\\n\",\n    \"some_func_name = app.produces(topic=\\\"test_topic_2\\\")(some_func_name)\\n\",\n    \"\\n\",\n    \"assert app._producers_store[\\\"test_topic_2_0\\\"] == (\\n\",\n    \"    check_func,\\n\",\n    \"    None,\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    {},\\n\",\n    \"), app._producers_store\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"some_func_name\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Check prefix change\\n\",\n    \"async def for_test_topic_3(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"check_func = for_test_topic_3\\n\",\n    \"some_func_name = app.produces(prefix=\\\"for_\\\")(for_test_topic_3)\\n\",\n    \"\\n\",\n    \"assert app._producers_store[\\\"test_topic_3_0\\\"] == (\\n\",\n    \"    check_func,\\n\",\n    \"    None,\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    {},\\n\",\n    \"), app._producers_store\\n\",\n    \"\\n\",\n    \"# Check passing of kwargs\\n\",\n    \"kwargs = {\\\"arg1\\\": \\\"val1\\\", \\\"arg2\\\": 2}\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"for_test_topic_3\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def for_test_kwargs(msg: BaseModel):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"check_func = for_test_kwargs\\n\",\n    \"for_test_kwargs = app.produces(topic=\\\"test_topic_0\\\", **kwargs)(for_test_kwargs)\\n\",\n    \"\\n\",\n    \"assert app._producers_store[\\\"test_topic_0_0\\\"] == (\\n\",\n    \"    check_func,\\n\",\n    \"    None,\\n\",\n    \"    app._kafka_brokers,\\n\",\n    \"    kwargs,\\n\",\n    \"), 
app._producers_store\\n\",\n    \"\\n\",\n    \"assert hasattr(app, \\\"for_test_kwargs\\\")\\n\",\n    \"\\n\",\n    \"# Check description setting\\n\",\n    \"async def to_test_topic_description(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"some_func_name = app.produces(description=\\\"Some generic producer\\\")(to_test_topic_description)\\n\",\n    \"\\n\",\n    \"assert app._producers_store[\\\"test_topic_description_0\\\"][0].description == \\\"Some generic producer\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"35f1d574\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"async def to_test_topic_broker_override(msg: BaseModel) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"check_func = to_test_topic_broker_override\\n\",\n    \"some_func_name = app.produces(\\n\",\n    \"    brokers=dict(\\n\",\n    \"        localhost=[\\n\",\n    \"            dict(url=\\\"localhost\\\", port=9092),\\n\",\n    \"        ]\\n\",\n    \"    ),\\n\",\n    \")(to_test_topic_broker_override)\\n\",\n    \"\\n\",\n    \"assert app._producers_store[\\\"test_topic_broker_override_0\\\"] == (\\n\",\n    \"    check_func,\\n\",\n    \"    None,\\n\",\n    \"    KafkaBrokers(\\n\",\n    \"        brokers={\\n\",\n    \"            \\\"localhost\\\": [\\n\",\n    \"                KafkaBroker(\\n\",\n    \"                    url=\\\"localhost\\\",\\n\",\n    \"                    description=\\\"Kafka broker\\\",\\n\",\n    \"                    port=9092,\\n\",\n    \"                    protocol=\\\"kafka\\\",\\n\",\n    \"                    security=None,\\n\",\n    \"                ),\\n\",\n    \"            ]\\n\",\n    \"        }\\n\",\n    \"    ),\\n\",\n    \"    {},\\n\",\n    \"), app._producers_store[\\\"test_topic_broker_override_0\\\"]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def to_test_topic_broker_wrong(msg: BaseModel) -> None:\\n\",\n    
\"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    check_func = to_test_topic_broker_wrong\\n\",\n    \"    some_func_name = app.produces(\\n\",\n    \"        brokers=dict(\\n\",\n    \"            not_localhost=[\\n\",\n    \"                dict(url=\\\"localhost\\\", port=9092),\\n\",\n    \"                dict(url=\\\"localhost\\\", port=9093),\\n\",\n    \"            ]\\n\",\n    \"        ),\\n\",\n    \"    )(to_test_topic_broker_wrong)\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    e.value.args[0]\\n\",\n    \"    == \\\"To override application default brokers, you must define all of the broker options. Default defined: {'localhost'}, override defined: {'not_localhost'}\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6d507987\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def get_topics(self: FastKafka) -> Iterable[str]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Get all topics for both producing and consuming.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A set of topics for both producing and consuming.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    produce_topics = set([remove_suffix(topic) for topic in self._producers_store])\\n\",\n    \"    consume_topics = set([remove_suffix(topic) for topic in self._consumers_store])\\n\",\n    \"    return consume_topics.union(produce_topics)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ac053363\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"app = create_testing_app()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_topic_1() -> BaseModel:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"def on_topic_2(msg: BaseModel):\\n\",\n    
\"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app.get_topics() == set([\\\"topic_1\\\", \\\"topic_2\\\"]), f\\\"{app.get_topics()=}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b4744bd3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def run_in_background(\\n\",\n    \"    self: FastKafka,\\n\",\n    \") -> Callable[\\n\",\n    \"    [Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]\\n\",\n    \"]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decorator to schedule a task to be run in the background.\\n\",\n    \"\\n\",\n    \"    This decorator is used to schedule a task to be run in the background when the app's `_on_startup` event is triggered.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Callable[None, None]: A decorator function that takes a background task as an input and stores it to be run in the backround.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def _decorator(\\n\",\n    \"        bg_task: Callable[..., Coroutine[Any, Any, Any]]\\n\",\n    \"    ) -> Callable[..., Coroutine[Any, Any, Any]]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Store the background task.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            bg_task (Callable[[], None]): The background task to be run asynchronously.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            Callable[[], None]: Original background task.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"run_in_background() : Adding function '{bg_task.__name__}' as background task\\\"\\n\",\n    \"        )\\n\",\n    \"        self._scheduled_bg_tasks.append(bg_task)\\n\",\n    \"\\n\",\n    \"        return bg_task\\n\",\n    \"\\n\",\n    \"    return _decorator\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n 
  \"execution_count\": null,\n   \"id\": \"16917ec0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: run_in_background() : Adding function 'async_background_job' as background task\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Check if the background job is getting registered\\n\",\n    \"\\n\",\n    \"app = create_testing_app()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.run_in_background()\\n\",\n    \"async def async_background_job():\\n\",\n    \"    \\\"\\\"\\\"Async background job\\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert app._scheduled_bg_tasks[0] == async_background_job, app._scheduled_bg_tasks[0]\\n\",\n    \"assert app._scheduled_bg_tasks.__len__() == 1\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e107bbeb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class MyInfo(BaseModel):\\n\",\n    \"    mobile: str = Field(..., example=\\\"+385987654321\\\")\\n\",\n    \"    name: str = Field(..., example=\\\"James Bond\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class MyMsgUrl(BaseModel):\\n\",\n    \"    info: MyInfo = Field(..., example=dict(mobile=\\\"+385987654321\\\", name=\\\"James Bond\\\"))\\n\",\n    \"    url: HttpUrl = Field(..., example=\\\"https://sis.gov.uk/agents/007\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class MyMsgEmail(BaseModel):\\n\",\n    \"    msg_url: MyMsgUrl = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=dict(\\n\",\n    \"            info=dict(mobile=\\\"+385987654321\\\", name=\\\"James Bond\\\"),\\n\",\n    \"            url=\\\"https://sis.gov.uk/agents/007\\\",\\n\",\n    \"        ),\\n\",\n    \"    )\\n\",\n    \"    email: EmailStr = Field(..., example=\\\"agent-007@sis.gov.uk\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def setup_testing_app(bootstrap_servers=None, 
override_bootstrap_servers=None):\\n\",\n    \"    app = create_testing_app(bootstrap_servers=bootstrap_servers)\\n\",\n    \"\\n\",\n    \"    host, port = None, None\\n\",\n    \"    if override_bootstrap_servers is not None:\\n\",\n    \"        host, port = override_bootstrap_servers.split(\\\":\\\")\\n\",\n    \"\\n\",\n    \"    override_broker = {\\n\",\n    \"        \\\"localhost\\\": {\\n\",\n    \"            \\\"url\\\": host if host is not None else \\\"localhost\\\",\\n\",\n    \"            \\\"name\\\": \\\"development\\\",\\n\",\n    \"            \\\"description\\\": \\\"Local (dev) Kafka broker\\\",\\n\",\n    \"            \\\"port\\\": port if port is not None else \\\"9092\\\",\\n\",\n    \"        }\\n\",\n    \"    }\\n\",\n    \"\\n\",\n    \"    @app.consumes(\\\"my_topic_1\\\", description=\\\"Consumer description\\\")\\n\",\n    \"    def on_my_topic_one(msg: MyMsgUrl) -> None:\\n\",\n    \"        logger.debug(f\\\"on_my_topic_one(msg={msg},)\\\")\\n\",\n    \"\\n\",\n    \"    @app.consumes(topic=\\\"my_topic_1\\\", brokers=override_broker)\\n\",\n    \"    async def on_my_topic_1(msg: MyMsgEmail) -> None:\\n\",\n    \"        logger.debug(f\\\"on_my_topic_2(msg={msg},)\\\")\\n\",\n    \"\\n\",\n    \"    with pytest.raises(ValueError) as e:\\n\",\n    \"\\n\",\n    \"        @app.consumes()\\n\",\n    \"        def my_topic_3(msg: MyMsgEmail) -> None:\\n\",\n    \"            raise NotImplemented\\n\",\n    \"\\n\",\n    \"    @app.produces(description=\\\"Producer description\\\")\\n\",\n    \"    async def to_my_topic_3(url: str) -> MyMsgUrl:\\n\",\n    \"        logger.debug(f\\\"on_my_topic_3(msg={url}\\\")\\n\",\n    \"        return MyMsgUrl(info=MyInfo(\\\"+3851987654321\\\", \\\"Sean Connery\\\"), url=url)\\n\",\n    \"\\n\",\n    \"    @app.produces()\\n\",\n    \"    async def to_my_topic_4(msg: MyMsgEmail) -> MyMsgEmail:\\n\",\n    \"        logger.debug(f\\\"on_my_topic_4(msg={msg}\\\")\\n\",\n    \"        return 
msg\\n\",\n    \"\\n\",\n    \"    @app.produces(topic=\\\"my_topic_4\\\", brokers=override_broker)\\n\",\n    \"    async def to_my_topic_4_2(url: str) -> MyMsgUrl:\\n\",\n    \"        logger.debug(f\\\"on_my_topic_5(msg={url}\\\")\\n\",\n    \"        return MyMsgUrl(info=MyInfo(\\\"+3859123456789\\\", \\\"John Wayne\\\"), url=url)\\n\",\n    \"\\n\",\n    \"    @app.run_in_background()\\n\",\n    \"    async def long_bg_job():\\n\",\n    \"        logger.debug(f\\\"long_bg_job()\\\")\\n\",\n    \"        await asyncio.sleep(100)\\n\",\n    \"\\n\",\n    \"    return app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8a945425\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: run_in_background() : Adding function 'long_bg_job' as background task\\n\",\n      \"app._kafka_service_info=title='' version='' description='' contact=ContactInfo(name='Author', url=Url('https://www.google.com/'), email='noreply@gmail.com')\\n\",\n      \"app._kafka_brokers=brokers={'localhost': KafkaBroker(url='localhost', description='Local (dev) Kafka broker', port='9092', protocol='kafka', security=None)}\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"app = setup_testing_app()\\n\",\n    \"\\n\",\n    \"assert set(app._consumers_store.keys()) == set([\\\"my_topic_1_0\\\", \\\"my_topic_1_1\\\"])\\n\",\n    \"assert set(app._producers_store.keys()) == set(\\n\",\n    \"    [\\\"my_topic_3_0\\\", \\\"my_topic_4_0\\\", \\\"my_topic_4_1\\\"]\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"print(f\\\"app._kafka_service_info={app._kafka_service_info}\\\")\\n\",\n    \"print(f\\\"app._kafka_brokers={app._kafka_brokers}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bb2ea338\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    
\"@patch\\n\",\n    \"def _populate_consumers(\\n\",\n    \"    self: FastKafka,\\n\",\n    \"    is_shutting_down_f: Callable[[], bool],\\n\",\n    \") -> None:\\n\",\n    \"    default_config: Dict[str, Any] = filter_using_signature(\\n\",\n    \"        fastkafka._aiokafka_imports.AIOKafkaConsumer, **self._kafka_config\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    bootstrap_server = self._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"\\n\",\n    \"    self._kafka_consumer_tasks = [\\n\",\n    \"        asyncio.create_task(\\n\",\n    \"            aiokafka_consumer_loop(\\n\",\n    \"                topic=\\\"_\\\".join(topic.split(\\\"_\\\")[:-1]),\\n\",\n    \"                decoder_fn=decoder_fn,\\n\",\n    \"                callback=consumer,\\n\",\n    \"                msg_type=signature(consumer).parameters[\\\"msg\\\"].annotation,\\n\",\n    \"                is_shutting_down_f=is_shutting_down_f,\\n\",\n    \"                executor=executor,\\n\",\n    \"                **{\\n\",\n    \"                    **default_config,\\n\",\n    \"                    **override_config,\\n\",\n    \"                    **{\\n\",\n    \"                        \\\"bootstrap_servers\\\": _get_broker_addr_list(\\n\",\n    \"                            kafka_brokers.brokers[bootstrap_server]\\n\",\n    \"                            if kafka_brokers is not None\\n\",\n    \"                            else self._kafka_brokers.brokers[bootstrap_server]\\n\",\n    \"                        )\\n\",\n    \"                    },\\n\",\n    \"                },\\n\",\n    \"            )\\n\",\n    \"        )\\n\",\n    \"        for topic, (\\n\",\n    \"            consumer,\\n\",\n    \"            decoder_fn,\\n\",\n    \"            executor,\\n\",\n    \"            kafka_brokers,\\n\",\n    \"            override_config,\\n\",\n    \"        ) in self._consumers_store.items()\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n  
  \"async def _shutdown_consumers(\\n\",\n    \"    self: FastKafka,\\n\",\n    \") -> None:\\n\",\n    \"    if self._kafka_consumer_tasks:\\n\",\n    \"        await asyncio.wait(self._kafka_consumer_tasks)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"df10b5e4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15992\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"Port 2181 is already in use\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=48943\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15993\\n\",\n      \"[INFO] __main__: run_in_background() : Adding function 'long_bg_job' as background task\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() 
starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:15992'}\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:15993'}\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'my_topic_1'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'my_topic_1'}\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'my_topic_1'})\\n\",\n      \"[INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'my_topic_1'}\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[WARNING] aiokafka.cluster: Topic my_topic_1 is not available during auto-create initialization\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'my_topic_1': 0}. \\n\",\n      \"[WARNING] aiokafka.cluster: Topic my_topic_1 is not available during auto-create initialization\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'my_topic_1': 0}. 
\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1886...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1886 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1516...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1516 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1088...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1088 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 718...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 718 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async with ApacheKafkaBroker(listener_port=15992) as bootstrap_server:\\n\",\n    \"    async with ApacheKafkaBroker(listener_port=15993) as override_bootstrap_server:\\n\",\n    \"        app = setup_testing_app(\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            override_bootstrap_servers=override_bootstrap_server,\\n\",\n    \"        )\\n\",\n    \"        app._populate_consumers(is_shutting_down_f=true_after(1))\\n\",\n    \"        assert len(app._kafka_consumer_tasks) == 2\\n\",\n    \"\\n\",\n    \"        await app._shutdown_consumers()\\n\",\n    
\"\\n\",\n    \"        assert all([t.done() for t in app._kafka_consumer_tasks])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7d6ee2b3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# TODO: Add passing of vars\\n\",\n    \"async def _create_producer(  # type: ignore\\n\",\n    \"    *,\\n\",\n    \"    callback: ProduceCallable,\\n\",\n    \"    default_config: Dict[str, Any],\\n\",\n    \"    override_config: Dict[str, Any],\\n\",\n    \"    bootstrap_servers: Union[str, List[str]],\\n\",\n    \"    producers_list: List[fastkafka._aiokafka_imports.AIOKafkaProducer],\\n\",\n    \") -> fastkafka._aiokafka_imports.AIOKafkaProducer:\\n\",\n    \"    \\\"\\\"\\\"Creates a producer\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        callback: A callback function that is called when the producer is ready.\\n\",\n    \"        producer: An existing producer to use.\\n\",\n    \"        default_config: A dictionary of default configuration values.\\n\",\n    \"        override_config: A dictionary of configuration values to override.\\n\",\n    \"        bootstrap_servers: Bootstrap servers to connect the producer to.\\n\",\n    \"        producers_list: A list of producers to add the new producer to.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A producer.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    config = {\\n\",\n    \"        **filter_using_signature(\\n\",\n    \"            fastkafka._aiokafka_imports.AIOKafkaProducer, **default_config\\n\",\n    \"        ),\\n\",\n    \"        **filter_using_signature(\\n\",\n    \"            fastkafka._aiokafka_imports.AIOKafkaProducer, **override_config\\n\",\n    \"        ),\\n\",\n    \"        **{\\\"bootstrap_servers\\\": bootstrap_servers},\\n\",\n    \"    }\\n\",\n    \"\\n\",\n    \"    producer = 
fastkafka._aiokafka_imports.AIOKafkaProducer(**config)\\n\",\n    \"    logger.info(\\n\",\n    \"        f\\\"_create_producer() : created producer using the config: '{sanitize_kafka_config(**config)}'\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await producer.start()\\n\",\n    \"\\n\",\n    \"    producers_list.append(producer)\\n\",\n    \"\\n\",\n    \"    return producer\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _populate_producers(self: FastKafka) -> None:\\n\",\n    \"    \\\"\\\"\\\"Populates the producers for the FastKafka instance.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        self: The FastKafka instance.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        None.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    default_config: Dict[str, Any] = self._kafka_config\\n\",\n    \"    bootstrap_server = default_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"\\n\",\n    \"    self._producers_list = []\\n\",\n    \"    self._producers_store.update(\\n\",\n    \"        {\\n\",\n    \"            topic: (\\n\",\n    \"                callback,\\n\",\n    \"                await _create_producer(\\n\",\n    \"                    callback=callback,\\n\",\n    \"                    default_config=default_config,\\n\",\n    \"                    override_config=override_config,\\n\",\n    \"                    bootstrap_servers=_get_broker_addr_list(\\n\",\n    \"                        kafka_brokers.brokers[bootstrap_server]\\n\",\n    \"                        if kafka_brokers is not None\\n\",\n    \"                        else self._kafka_brokers.brokers[bootstrap_server]\\n\",\n    \"                    ),\\n\",\n    \"                    producers_list=self._producers_list,\\n\",\n    \"                ),\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                override_config,\\n\",\n    \"            )\\n\",\n    \"   
         for topic, (\\n\",\n    \"                callback,\\n\",\n    \"                _,\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                override_config,\\n\",\n    \"            ) in self._producers_store.items()\\n\",\n    \"        }\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _shutdown_producers(self: FastKafka) -> None:\\n\",\n    \"    [await producer.stop() for producer in self._producers_list[::-1]]\\n\",\n    \"    # Remove references to stale producers\\n\",\n    \"    self._producers_list = []\\n\",\n    \"    self._producers_store.update(\\n\",\n    \"        {\\n\",\n    \"            topic: (\\n\",\n    \"                callback,\\n\",\n    \"                None,\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                override_config,\\n\",\n    \"            )\\n\",\n    \"            for topic, (\\n\",\n    \"                callback,\\n\",\n    \"                _,\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                override_config,\\n\",\n    \"            ) in self._producers_store.items()\\n\",\n    \"        }\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0546037d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15992\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] 
fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"Port 2181 is already in use\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=34285\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15993\\n\",\n      \"[INFO] __main__: run_in_background() : Adding function 'long_bg_job' as background task\\n\",\n      \"{'my_topic_3_0': (<function setup_testing_app.<locals>.to_my_topic_3>, None, KafkaBrokers(brokers={'localhost': KafkaBroker(url='127.0.0.1', description='Local (dev) Kafka broker', port='15992', protocol='kafka', security=None)}), {}), 'my_topic_4_0': (<function setup_testing_app.<locals>.to_my_topic_4>, None, KafkaBrokers(brokers={'localhost': KafkaBroker(url='127.0.0.1', description='Local (dev) Kafka broker', port='15992', protocol='kafka', security=None)}), {}), 'my_topic_4_1': (<function setup_testing_app.<locals>.to_my_topic_4_2>, None, KafkaBrokers(brokers={'localhost': KafkaBroker(url='127.0.0.1', description='Local (dev) Kafka broker', port='15993', protocol='kafka', security=None)}), {})}\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15993'}'\\n\",\n      \"{'my_topic_3_0': (<function setup_testing_app.<locals>.to_my_topic_3>, <aiokafka.producer.producer.AIOKafkaProducer object>, KafkaBrokers(brokers={'localhost': 
KafkaBroker(url='127.0.0.1', description='Local (dev) Kafka broker', port='15992', protocol='kafka', security=None)}), {}), 'my_topic_4_0': (<function setup_testing_app.<locals>.to_my_topic_4>, <aiokafka.producer.producer.AIOKafkaProducer object>, KafkaBrokers(brokers={'localhost': KafkaBroker(url='127.0.0.1', description='Local (dev) Kafka broker', port='15992', protocol='kafka', security=None)}), {}), 'my_topic_4_1': (<function setup_testing_app.<locals>.to_my_topic_4_2>, <aiokafka.producer.producer.AIOKafkaProducer object>, KafkaBrokers(brokers={'localhost': KafkaBroker(url='127.0.0.1', description='Local (dev) Kafka broker', port='15993', protocol='kafka', security=None)}), {})}\\n\",\n      \"[<aiokafka.producer.producer.AIOKafkaProducer object>, <aiokafka.producer.producer.AIOKafkaProducer object>, <aiokafka.producer.producer.AIOKafkaProducer object>]\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15993'}'\\n\",\n      \"[<aiokafka.producer.producer.AIOKafkaProducer object>, <aiokafka.producer.producer.AIOKafkaProducer object>, <aiokafka.producer.producer.AIOKafkaProducer object>]\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 3518...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 3518 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 3149...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 3149 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating 
the process 2720...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 2720 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 2350...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 2350 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async with ApacheKafkaBroker(listener_port=15992) as bootstrap_server:\\n\",\n    \"    async with ApacheKafkaBroker(listener_port=15993) as override_bootstrap_server:\\n\",\n    \"        app = setup_testing_app(\\n\",\n    \"            bootstrap_servers=bootstrap_server,\\n\",\n    \"            override_bootstrap_servers=override_bootstrap_server,\\n\",\n    \"        )\\n\",\n    \"        print(app._producers_store)\\n\",\n    \"        await app._populate_producers()\\n\",\n    \"        print(app._producers_store)\\n\",\n    \"        assert len(app._producers_list) == 3\\n\",\n    \"        print(app._producers_list)\\n\",\n    \"        await app._shutdown_producers()\\n\",\n    \"\\n\",\n    \"        # One more time for reentrancy\\n\",\n    \"        await app._populate_producers()\\n\",\n    \"        assert len(app._producers_list) == 3\\n\",\n    \"        print(app._producers_list)\\n\",\n    \"        await app._shutdown_producers()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2b15e020\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _populate_bg_tasks(\\n\",\n    \"    self: FastKafka,\\n\",\n    \") -> None:\\n\",\n    \"    def _start_bg_task(task: Callable[..., Coroutine[Any, Any, Any]]) -> asyncio.Task:\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"_populate_bg_tasks() : Starting background task '{task.__name__}'\\\"\\n\",\n    \"        )\\n\",\n    \"        return 
asyncio.create_task(task(), name=task.__name__)\\n\",\n    \"\\n\",\n    \"    self._running_bg_tasks = [_start_bg_task(task) for task in self._scheduled_bg_tasks]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _shutdown_bg_tasks(\\n\",\n    \"    self: FastKafka,\\n\",\n    \") -> None:\\n\",\n    \"    for task in self._running_bg_tasks:\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"_shutdown_bg_tasks() : Cancelling background task '{task.get_name()}'\\\"\\n\",\n    \"        )\\n\",\n    \"        task.cancel()\\n\",\n    \"\\n\",\n    \"    for task in self._running_bg_tasks:\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"_shutdown_bg_tasks() : Waiting for background task '{task.get_name()}' to finish\\\"\\n\",\n    \"        )\\n\",\n    \"        try:\\n\",\n    \"            await task\\n\",\n    \"        except asyncio.CancelledError:\\n\",\n    \"            pass\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"_shutdown_bg_tasks() : Execution finished for background task '{task.get_name()}'\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d4c687d9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15992\\n\",\n      \"[INFO] __main__: run_in_background() : Adding function 'long_bg_job' as background task\\n\",\n      \"[INFO] __main__: run_in_background() : Adding function 'long_bg_job' as background 
task\\n\",\n      \"[INFO] __main__: _populate_bg_tasks() : Starting background task 'long_bg_job'\\n\",\n      \"[INFO] __main__: _populate_bg_tasks() : Starting background task 'long_bg_job'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Cancelling background task 'long_bg_job'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Cancelling background task 'long_bg_job'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Waiting for background task 'long_bg_job' to finish\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Execution finished for background task 'long_bg_job'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Waiting for background task 'long_bg_job' to finish\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Execution finished for background task 'long_bg_job'\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 4348...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 4348 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 3977...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 3977 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async with ApacheKafkaBroker(listener_port=15992) as bootstrap_server:\\n\",\n    \"    app = setup_testing_app(bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"    @app.run_in_background()\\n\",\n    \"    async def long_bg_job():\\n\",\n    \"        logger.debug(f\\\"new_long_bg_job()\\\")\\n\",\n    \"        await asyncio.sleep(100)\\n\",\n    \"\\n\",\n    \"    await app._populate_bg_tasks()\\n\",\n    \"    assert len(app._scheduled_bg_tasks) == 2\\n\",\n    \"    assert len(app._running_bg_tasks) == 2\\n\",\n    \"    await app._shutdown_bg_tasks()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"13bd84f1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _start(self: FastKafka) -> None:\\n\",\n    \"    def is_shutting_down_f(self: FastKafka = self) -> bool:\\n\",\n    \"        return self._is_shutting_down\\n\",\n    \"\\n\",\n    \"    #     self.create_docs()\\n\",\n    \"    await self._populate_producers()\\n\",\n    \"    self._populate_consumers(is_shutting_down_f)\\n\",\n    \"    await self._populate_bg_tasks()\\n\",\n    \"\\n\",\n    \"    self._is_started = True\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"async def _stop(self: FastKafka) -> None:\\n\",\n    \"    self._is_shutting_down = True\\n\",\n    \"\\n\",\n    \"    await self._shutdown_bg_tasks()\\n\",\n    \"    await self._shutdown_consumers()\\n\",\n    \"    await self._shutdown_producers()\\n\",\n    \"\\n\",\n    \"    self._is_shutting_down = False\\n\",\n    \"    self._is_started = False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"18b199a8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15992\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] 
fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 5160...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 5160 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 4791...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 4791 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test app reentrancy\\n\",\n    \"\\n\",\n    \"async with ApacheKafkaBroker(listener_port=15992) as bootstrap_server:\\n\",\n    \"    with mock_AIOKafkaProducer_send() as mock:\\n\",\n    \"        app = create_testing_app(bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"        @app.produces()\\n\",\n    \"        async def to_my_test_topic(mobile: str, url: str) -> MyMsgUrl:\\n\",\n    \"            msg = MyMsgUrl(info=dict(mobile=mobile, name=\\\"James Bond\\\"), url=url)\\n\",\n    \"            return msg\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            await app._start()\\n\",\n    \"            await app.to_my_test_topic(mobile=\\\"+385912345678\\\", url=\\\"https://www.vip.hr\\\")\\n\",\n    \"        finally:\\n\",\n    \"            await app._stop()\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            await app._start()\\n\",\n    \"            await app.to_my_test_topic(mobile=\\\"+385987654321\\\", url=\\\"https://www.ht.hr\\\")\\n\",\n    \"        finally:\\n\",\n    \"            await app._stop()\\n\",\n    \"\\n\",\n    \"        mock.assert_has_calls(\\n\",\n    \"            [\\n\",\n    \"                unittest.mock.call(\\n\",\n    \"                    \\\"my_test_topic\\\",\\n\",\n    \"                    b'{\\\"info\\\":{\\\"mobile\\\":\\\"+385912345678\\\",\\\"name\\\":\\\"James Bond\\\"},\\\"url\\\":\\\"https://www.vip.hr/\\\"}',\\n\",\n    \"                    key=None,\\n\",\n    \"                ),\\n\",\n   
 \"                unittest.mock.call(\\n\",\n    \"                    \\\"my_test_topic\\\",\\n\",\n    \"                    b'{\\\"info\\\":{\\\"mobile\\\":\\\"+385987654321\\\",\\\"name\\\":\\\"James Bond\\\"},\\\"url\\\":\\\"https://www.ht.hr/\\\"}',\\n\",\n    \"                    key=None,\\n\",\n    \"                ),\\n\",\n    \"            ]\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ff9cfce8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15992\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:15992'}'\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 5975...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 5975 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 5604...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 5604 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# mock up send method of fastkafka._aiokafka_imports.AIOKafkaProducer\\n\",\n    \"async with ApacheKafkaBroker(listener_port=15992) as bootstrap_server:\\n\",\n    
\"    with mock_AIOKafkaProducer_send() as mock:\\n\",\n    \"        app = create_testing_app(bootstrap_servers=bootstrap_server)\\n\",\n    \"\\n\",\n    \"        @app.produces()\\n\",\n    \"        async def to_my_test_topic(mobile: str, url: str) -> MyMsgUrl:\\n\",\n    \"            msg = MyMsgUrl(info=dict(mobile=mobile, name=\\\"James Bond\\\"), url=url)\\n\",\n    \"            return msg\\n\",\n    \"\\n\",\n    \"        @app.produces()\\n\",\n    \"        async def to_my_test_topic_2(mobile: str, url: str) -> MyMsgUrl:\\n\",\n    \"            msg = MyMsgUrl(info=dict(mobile=mobile, name=\\\"James Bond\\\"), url=url)\\n\",\n    \"            return msg\\n\",\n    \"\\n\",\n    \"        try:\\n\",\n    \"            await app._start()\\n\",\n    \"            await to_my_test_topic(mobile=\\\"+385912345678\\\", url=\\\"https://www.vip.hr\\\")\\n\",\n    \"            await to_my_test_topic_2(mobile=\\\"+385987654321\\\", url=\\\"https://www.ht.hr\\\")\\n\",\n    \"        finally:\\n\",\n    \"            await app._stop()\\n\",\n    \"\\n\",\n    \"        mock.assert_has_calls(\\n\",\n    \"            [\\n\",\n    \"                unittest.mock.call(\\n\",\n    \"                    \\\"my_test_topic\\\",\\n\",\n    \"                    b'{\\\"info\\\":{\\\"mobile\\\":\\\"+385912345678\\\",\\\"name\\\":\\\"James Bond\\\"},\\\"url\\\":\\\"https://www.vip.hr/\\\"}',\\n\",\n    \"                    key=None,\\n\",\n    \"                ),\\n\",\n    \"                unittest.mock.call(\\n\",\n    \"                    \\\"my_test_topic_2\\\",\\n\",\n    \"                    b'{\\\"info\\\":{\\\"mobile\\\":\\\"+385987654321\\\",\\\"name\\\":\\\"James Bond\\\"},\\\"url\\\":\\\"https://www.ht.hr/\\\"}',\\n\",\n    \"                    key=None,\\n\",\n    \"                ),\\n\",\n    \"            ]\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6b114fc4\",\n   \"metadata\": 
{},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15992\\n\",\n      \"[INFO] __main__: run_in_background() : Adding function 'bg_task' as background task\\n\",\n      \"[INFO] __main__: run_in_background() : Adding function 'bg_task_second' as background task\\n\",\n      \"[INFO] __main__: _populate_bg_tasks() : Starting background task 'bg_task'\\n\",\n      \"[INFO] __main__: _populate_bg_tasks() : Starting background task 'bg_task_second'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Cancelling background task 'bg_task'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Cancelling background task 'bg_task_second'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Waiting for background task 'bg_task' to finish\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Execution finished for background task 'bg_task'\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Waiting for background task 'bg_task_second' to finish\\n\",\n      \"[INFO] __main__: _shutdown_bg_tasks() : Execution finished for background task 'bg_task_second'\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 6787...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 6787 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 6418...\\n\",\n      \"[INFO] fastkafka._components._subprocess: 
terminate_asyncio_process(): Process 6418 terminated.\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async with ApacheKafkaBroker(listener_port=15992) as bootstrap_server:\\n\",\n    \"    app = create_testing_app(bootstrap_servers=bootstrap_server)\\n\",\n    \"    fast_task = unittest.mock.Mock()\\n\",\n    \"    long_task = unittest.mock.Mock()\\n\",\n    \"\\n\",\n    \"    @app.run_in_background()\\n\",\n    \"    async def bg_task():\\n\",\n    \"        fast_task()\\n\",\n    \"        await asyncio.sleep(100)\\n\",\n    \"        long_task()\\n\",\n    \"\\n\",\n    \"    fast_task_second = unittest.mock.Mock()\\n\",\n    \"    long_task_second = unittest.mock.Mock()\\n\",\n    \"\\n\",\n    \"    @app.run_in_background()\\n\",\n    \"    async def bg_task_second():\\n\",\n    \"        fast_task_second()\\n\",\n    \"        await asyncio.sleep(100)\\n\",\n    \"        long_task_second()\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"        await app._start()\\n\",\n    \"        await asyncio.sleep(5)\\n\",\n    \"    finally:\\n\",\n    \"        await app._stop()\\n\",\n    \"\\n\",\n    \"    fast_task.assert_called()\\n\",\n    \"    long_task.assert_not_called()\\n\",\n    \"\\n\",\n    \"    fast_task_second.assert_called()\\n\",\n    \"    long_task_second.assert_not_called()\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0c4bb0ac\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling 
nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:15992\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:15992\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 7600...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 7600 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 7231...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 7231 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# test lifespan hook\\n\",\n    \"\\n\",\n    \"global_dict = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    try:\\n\",\n    \"        global_dict[\\\"set_var\\\"] = 123\\n\",\n    \"        global_dict[\\\"app\\\"] = app\\n\",\n    \"        yield\\n\",\n    \"    finally:\\n\",\n    \"        global_dict[\\\"set_var\\\"] = 321\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(apply_nest_asyncio=True, listener_port=15992) as bootstrap_servers:\\n\",\n    \"    host, port = 
bootstrap_servers.split(\\\":\\\")\\n\",\n    \"\\n\",\n    \"    kafka_app = FastKafka(\\n\",\n    \"        kafka_brokers={\\n\",\n    \"            \\\"localhost\\\": {\\n\",\n    \"                \\\"url\\\": host if host is not None else \\\"localhost\\\",\\n\",\n    \"                \\\"name\\\": \\\"development\\\",\\n\",\n    \"                \\\"description\\\": \\\"Local (dev) Kafka broker\\\",\\n\",\n    \"                \\\"port\\\": port if port is not None else 9092,\\n\",\n    \"            }\\n\",\n    \"        },\\n\",\n    \"        root_path=\\\"/tmp/000_FastKafka\\\",\\n\",\n    \"        lifespan=lifespan,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    kafka_app.set_kafka_broker(kafka_broker_name=\\\"localhost\\\")\\n\",\n    \"\\n\",\n    \"    # Dict unchanged\\n\",\n    \"    assert global_dict == {}\\n\",\n    \"\\n\",\n    \"    async with kafka_app:\\n\",\n    \"        # Lifespan aenter triggered\\n\",\n    \"        assert global_dict[\\\"set_var\\\"] == 123\\n\",\n    \"        # Kafka app reference passed\\n\",\n    \"        assert global_dict[\\\"app\\\"] == kafka_app\\n\",\n    \"\\n\",\n    \"    # Lifespan aexit triggered\\n\",\n    \"    assert global_dict[\\\"set_var\\\"] == 321\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fe047872\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Documentation generation\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"880411a6\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def create_docs(self: FastKafka) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Create the asyncapi documentation based on the configured consumers and producers.\\n\",\n    \"\\n\",\n    \"    This function exports the asyncapi specification based on the configured consumers\\n\",\n    \"    and producers in the FastKafka instance. 
It generates the asyncapi documentation by\\n\",\n    \"    extracting the topics and callbacks from the consumers and producers.\\n\",\n    \"\\n\",\n    \"    Note:\\n\",\n    \"        The asyncapi documentation is saved to the location specified by the `_asyncapi_path`\\n\",\n    \"        attribute of the FastKafka instance.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    (self._asyncapi_path / \\\"docs\\\").mkdir(exist_ok=True, parents=True)\\n\",\n    \"    (self._asyncapi_path / \\\"spec\\\").mkdir(exist_ok=True, parents=True)\\n\",\n    \"    export_async_spec(\\n\",\n    \"        consumers={\\n\",\n    \"            remove_suffix(topic) if topic.endswith(\\\"_0\\\") else topic: callback\\n\",\n    \"            for topic, (callback, _, _, _, _) in self._consumers_store.items()\\n\",\n    \"        },\\n\",\n    \"        producers={\\n\",\n    \"            remove_suffix(topic) if topic.endswith(\\\"_0\\\") else topic: callback\\n\",\n    \"            for topic, (callback, _, _, _) in self._producers_store.items()\\n\",\n    \"        },\\n\",\n    \"        kafka_brokers=self._kafka_brokers,\\n\",\n    \"        kafka_service_info=self._kafka_service_info,\\n\",\n    \"        asyncapi_path=self._asyncapi_path,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2ec33cb1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"expected = \\\"\\\"\\\"asyncapi: 2.5.0\\n\",\n    \"channels:\\n\",\n    \"  my_topic_1:\\n\",\n    \"    subscribe:\\n\",\n    \"      message:\\n\",\n    \"        $ref: '#/components/messages/MyMsgUrl'\\n\",\n    \"      description: \\\"Consumer description\\\"\\n\",\n    \"  my_topic_1_1:\\n\",\n    \"    subscribe:\\n\",\n    \"      message:\\n\",\n    \"        $ref: '#/components/messages/MyMsgEmail'\\n\",\n    \"  my_topic_3:\\n\",\n    \"    publish:\\n\",\n    \"      message:\\n\",\n    \"        $ref: '#/components/messages/MyMsgUrl'\\n\",\n    
\"      description: \\\"Producer description\\\"\\n\",\n    \"  my_topic_4:\\n\",\n    \"    publish:\\n\",\n    \"      message:\\n\",\n    \"        $ref: '#/components/messages/MyMsgEmail'\\n\",\n    \"  my_topic_4_1:\\n\",\n    \"    publish:\\n\",\n    \"      message:\\n\",\n    \"        $ref: '#/components/messages/MyMsgUrl'\\n\",\n    \"components:\\n\",\n    \"  messages:\\n\",\n    \"    MyMsgEmail:\\n\",\n    \"      payload:\\n\",\n    \"        example:\\n\",\n    \"          email: agent-007@sis.gov.uk\\n\",\n    \"          msg_url:\\n\",\n    \"            info:\\n\",\n    \"              mobile: '+385987654321'\\n\",\n    \"              name: James Bond\\n\",\n    \"            url: https://sis.gov.uk/agents/007\\n\",\n    \"        properties:\\n\",\n    \"          email:\\n\",\n    \"            example: agent-007@sis.gov.uk\\n\",\n    \"            format: email\\n\",\n    \"            title: Email\\n\",\n    \"            type: string\\n\",\n    \"          msg_url:\\n\",\n    \"            allOf:\\n\",\n    \"            - $ref: '#/components/messages/MyMsgUrl'\\n\",\n    \"            example:\\n\",\n    \"              info:\\n\",\n    \"                mobile: '+385987654321'\\n\",\n    \"                name: James Bond\\n\",\n    \"              url: https://sis.gov.uk/agents/007\\n\",\n    \"        required:\\n\",\n    \"        - msg_url\\n\",\n    \"        - email\\n\",\n    \"        title: MyMsgEmail\\n\",\n    \"        type: object\\n\",\n    \"    MyMsgUrl:\\n\",\n    \"      payload:\\n\",\n    \"        example:\\n\",\n    \"          info:\\n\",\n    \"            mobile: '+385987654321'\\n\",\n    \"            name: James Bond\\n\",\n    \"          url: https://sis.gov.uk/agents/007\\n\",\n    \"        properties:\\n\",\n    \"          info:\\n\",\n    \"            allOf:\\n\",\n    \"            - $ref: '#/components/schemas/MyInfo'\\n\",\n    \"            example:\\n\",\n    \"              mobile: 
'+385987654321'\\n\",\n    \"              name: James Bond\\n\",\n    \"          url:\\n\",\n    \"            example: https://sis.gov.uk/agents/007\\n\",\n    \"            format: uri\\n\",\n    \"            maxLength: 2083\\n\",\n    \"            minLength: 1\\n\",\n    \"            title: Url\\n\",\n    \"            type: string\\n\",\n    \"        required:\\n\",\n    \"        - info\\n\",\n    \"        - url\\n\",\n    \"        title: MyMsgUrl\\n\",\n    \"        type: object\\n\",\n    \"  schemas:\\n\",\n    \"    MyInfo:\\n\",\n    \"      payload:\\n\",\n    \"        properties:\\n\",\n    \"          mobile:\\n\",\n    \"            example: '+385987654321'\\n\",\n    \"            title: Mobile\\n\",\n    \"            type: string\\n\",\n    \"          name:\\n\",\n    \"            example: James Bond\\n\",\n    \"            title: Name\\n\",\n    \"            type: string\\n\",\n    \"        required:\\n\",\n    \"        - mobile\\n\",\n    \"        - name\\n\",\n    \"        title: MyInfo\\n\",\n    \"        type: object\\n\",\n    \"  securitySchemes: {}\\n\",\n    \"info:\\n\",\n    \"  contact:\\n\",\n    \"    email: noreply@gmail.com\\n\",\n    \"    name: Author\\n\",\n    \"    url: https://www.google.com/\\n\",\n    \"  description: ''\\n\",\n    \"  title: ''\\n\",\n    \"  version: ''\\n\",\n    \"servers:\\n\",\n    \"  localhost:\\n\",\n    \"    description: Local (dev) Kafka broker\\n\",\n    \"    protocol: kafka\\n\",\n    \"    url: localhost\\n\",\n    \"    variables:\\n\",\n    \"      port:\\n\",\n    \"        default: '9092'\\n\",\n    \"\\\"\\\"\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c407913e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: run_in_background() : Adding function 'long_bg_job' as background task\\n\",\n      \"[INFO] 
fastkafka._components.asyncapi: Old async specifications at '/tmp/000_FastKafka/asyncapi/spec/asyncapi.yml' does not exist.\\n\",\n      \"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/tmp/000_FastKafka/asyncapi/spec/asyncapi.yml'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Async docs generated at '/tmp/000_FastKafka/asyncapi/docs'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag /tmp/000_FastKafka/asyncapi/spec/asyncapi.yml @asyncapi/html-template -o /tmp/000_FastKafka/asyncapi/docs --force-write'npm WARN deprecated har-validator@5.1.5: this library is no longer supported\\n\",\n      \"npm WARN deprecated debuglog@1.0.1: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.\\n\",\n      \"npm WARN deprecated uuid@3.4.0: Please upgrade  to version 7 or higher.  Older versions may use Math.random() in certain circumstances, which is known to be problematic.  See https://v8.dev/blog/math-random for details.\\n\",\n      \"npm WARN deprecated request@2.88.2: request has been deprecated, see https://github.com/request/request/issues/3142\\n\",\n      \"npm WARN deprecated readdir-scoped-modules@1.1.0: This functionality has been moved to @npmcli/fs\\n\",\n      \"npm WARN deprecated @npmcli/move-file@1.1.2: This functionality has been moved to @npmcli/fs\\n\",\n      \"npm WARN deprecated mkdirp@0.3.5: Legacy versions of mkdirp are no longer supported. Please update to mkdirp 1.x. (Note that the API surface has changed to use Promises in 1.x.)\\n\",\n      \"npm WARN deprecated mkdirp@0.3.5: Legacy versions of mkdirp are no longer supported. Please update to mkdirp 1.x. (Note that the API surface has changed to use Promises in 1.x.)\\n\",\n      \"\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! 
✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/tmp/000_FastKafka/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"npm notice \\n\",\n      \"npm notice New minor version of npm available! 9.6.3 -> 9.8.1\\n\",\n      \"npm notice Changelog: <https://github.com/npm/cli/releases/tag/v9.8.1>\\n\",\n      \"npm notice Run `npm install -g npm@9.8.1` to update!\\n\",\n      \"npm notice \\n\",\n      \"\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"d1, d2 = None, None\\n\",\n    \"\\n\",\n    \"docs_path = Path(\\\"/tmp/000_FastKafka/asyncapi/spec/asyncapi.yml\\\")\\n\",\n    \"if docs_path.exists():\\n\",\n    \"    os.remove(docs_path)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def test_me():\\n\",\n    \"    global d1\\n\",\n    \"    global d2\\n\",\n    \"    app = setup_testing_app()\\n\",\n    \"    app.create_docs()\\n\",\n    \"    with open(docs_path) as specs:\\n\",\n    \"        d1 = yaml.safe_load(specs)\\n\",\n    \"        d2 = yaml.safe_load(expected)\\n\",\n    \"        assert d1 == d2, f\\\"{d1} != {d2}\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"asyncio.run(test_me())\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"5cafbc67\",\n   \"metadata\": {},\n   \"source\": [\n    \"## App mocks\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f4cdce7a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class AwaitedMock:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Class representing an awaited mock object.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        o: The original object to be wrapped.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    @staticmethod\\n\",\n    \"    def _await_for(f: Callable[..., Any]) -> Callable[..., Any]:\\n\",\n    \"        
@delegates(f)\\n\",\n    \"        async def inner(\\n\",\n    \"            *args: Any, f: Callable[..., Any] = f, timeout: int = 60, **kwargs: Any\\n\",\n    \"        ) -> Any:\\n\",\n    \"            \\\"\\\"\\\"\\n\",\n    \"            Decorator to await the execution of a function.\\n\",\n    \"\\n\",\n    \"            Args:\\n\",\n    \"                f: The function to be wrapped.\\n\",\n    \"\\n\",\n    \"            Returns:\\n\",\n    \"                The wrapped function.\\n\",\n    \"            \\\"\\\"\\\"\\n\",\n    \"            if inspect.iscoroutinefunction(f):\\n\",\n    \"                return await asyncio.wait_for(f(*args, **kwargs), timeout=timeout)\\n\",\n    \"            else:\\n\",\n    \"                t0 = datetime.now()\\n\",\n    \"                e: Optional[Exception] = None\\n\",\n    \"                while True:\\n\",\n    \"                    try:\\n\",\n    \"                        return f(*args, **kwargs)\\n\",\n    \"                    except Exception as _e:\\n\",\n    \"                        await asyncio.sleep(1)\\n\",\n    \"                        e = _e\\n\",\n    \"\\n\",\n    \"                    if datetime.now() - t0 > timedelta(seconds=timeout):\\n\",\n    \"                        break\\n\",\n    \"\\n\",\n    \"                raise e\\n\",\n    \"\\n\",\n    \"        return inner\\n\",\n    \"\\n\",\n    \"    def __init__(self, o: Any):\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initializes an instance of AwaitedMock.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            o: The original object to be wrapped.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self._o = o\\n\",\n    \"\\n\",\n    \"        for name in o.__dir__():\\n\",\n    \"            if not name.startswith(\\\"_\\\"):\\n\",\n    \"                f = getattr(o, name)\\n\",\n    \"                if inspect.ismethod(f):\\n\",\n    \"                    setattr(self, name, self._await_for(f))\"\n   
]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8d95b332\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def create_mocks(self: FastKafka) -> None:\\n\",\n    \"    \\\"\\\"\\\"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock\\\"\\\"\\\"\\n\",\n    \"    app_methods = [f for f, _, _, _, _ in self._consumers_store.values()] + [\\n\",\n    \"        f for f, _, _, _ in self._producers_store.values()\\n\",\n    \"    ]\\n\",\n    \"    self.AppMocks = namedtuple(  # type: ignore\\n\",\n    \"        f\\\"{self.__class__.__name__}Mocks\\\", [f.__name__ for f in app_methods]\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    self.mocks = self.AppMocks(  # type: ignore\\n\",\n    \"        **{\\n\",\n    \"            f.__name__: AsyncMock() if inspect.iscoroutinefunction(f) else MagicMock()\\n\",\n    \"            for f in app_methods\\n\",\n    \"        }\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    self.awaited_mocks = self.AppMocks(  # type: ignore\\n\",\n    \"        **{name: AwaitedMock(mock) for name, mock in self.mocks._asdict().items()}\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    def add_mock(\\n\",\n    \"        f: Callable[..., Any], mock: Union[AsyncMock, MagicMock]\\n\",\n    \"    ) -> Callable[..., Any]:\\n\",\n    \"        \\\"\\\"\\\"Add call to mock when calling function f\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        @functools.wraps(f)\\n\",\n    \"        async def async_inner(\\n\",\n    \"            *args: Any, f: Callable[..., Any] = f, mock: AsyncMock = mock, **kwargs: Any\\n\",\n    \"        ) -> Any:\\n\",\n    \"            await mock(*deepcopy(args), **kwargs)\\n\",\n    \"            return await f(*args, **kwargs)\\n\",\n    \"\\n\",\n    \"        @functools.wraps(f)\\n\",\n    \"        def sync_inner(\\n\",\n    \"            
*args: Any, f: Callable[..., Any] = f, mock: MagicMock = mock, **kwargs: Any\\n\",\n    \"        ) -> Any:\\n\",\n    \"            mock(*deepcopy(args), **kwargs)\\n\",\n    \"            return f(*args, **kwargs)\\n\",\n    \"\\n\",\n    \"        if inspect.iscoroutinefunction(f):\\n\",\n    \"            return async_inner\\n\",\n    \"        else:\\n\",\n    \"            return sync_inner\\n\",\n    \"\\n\",\n    \"    self._consumers_store.update(\\n\",\n    \"        {\\n\",\n    \"            name: (\\n\",\n    \"                add_mock(f, getattr(self.mocks, f.__name__)),\\n\",\n    \"                decoder_fn,\\n\",\n    \"                executor,\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                kwargs,\\n\",\n    \"            )\\n\",\n    \"            for name, (\\n\",\n    \"                f,\\n\",\n    \"                decoder_fn,\\n\",\n    \"                executor,\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                kwargs,\\n\",\n    \"            ) in self._consumers_store.items()\\n\",\n    \"        }\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    self._producers_store.update(\\n\",\n    \"        {\\n\",\n    \"            name: (\\n\",\n    \"                add_mock(f, getattr(self.mocks, f.__name__)),\\n\",\n    \"                producer,\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                kwargs,\\n\",\n    \"            )\\n\",\n    \"            for name, (\\n\",\n    \"                f,\\n\",\n    \"                producer,\\n\",\n    \"                kafka_brokers,\\n\",\n    \"                kwargs,\\n\",\n    \"            ) in self._producers_store.items()\\n\",\n    \"        }\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b0ed851c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    
\"\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(\\n\",\n    \"    topic=\\\"preprocessed_signals\\\",\\n\",\n    \"    brokers=dict(\\n\",\n    \"        localhost=[\\n\",\n    \"            dict(url=\\\"localhost\\\", port=9092),\\n\",\n    \"            dict(url=\\\"localhost\\\", port=9093),\\n\",\n    \"        ]\\n\",\n    \"    ),\\n\",\n    \")\\n\",\n    \"async def on_preprocessed_signals_second(msg: TestMsg):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ca781c10\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"app.create_mocks()\\n\",\n    \"app.mocks.on_preprocessed_signals.assert_not_awaited()\\n\",\n    \"app.mocks.on_preprocessed_signals_second.assert_not_awaited()\\n\",\n    \"app.mocks.to_predictions.assert_not_awaited()\\n\",\n    \"app.create_mocks()\\n\",\n    \"app.mocks.on_preprocessed_signals.assert_not_awaited()\\n\",\n    \"app.mocks.on_preprocessed_signals_second.assert_not_awaited()\\n\",\n    \"app.mocks.to_predictions.assert_not_awaited()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"89b07708\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with pytest.raises(AssertionError) as e:\\n\",\n    \"    await app.awaited_mocks.on_preprocessed_signals.assert_called_with(123, timeout=2)\"\n   ]\n  },\n  {\n  
 \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"afa1957c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"app.create_mocks()\\n\",\n    \"app.mocks.on_preprocessed_signals.assert_not_awaited()\\n\",\n    \"await app.awaited_mocks.on_preprocessed_signals.assert_not_awaited(timeout=3)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7bdaa582\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def benchmark(\\n\",\n    \"    self: FastKafka,\\n\",\n    \"    interval: Union[int, timedelta] = 1,\\n\",\n    \"    *,\\n\",\n    \"    sliding_window_size: Optional[int] = None,\\n\",\n    \") -> Callable[[Callable[[I], Optional[O]]], Callable[[I], Optional[O]]]:\\n\",\n    \"    \\\"\\\"\\\"Decorator to benchmark produces/consumes functions\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        interval: Period to use to calculate throughput. If value is of type int,\\n\",\n    \"            then it will be used as seconds. If value is of type timedelta,\\n\",\n    \"            then it will be used as it is. default: 1 - one second\\n\",\n    \"        sliding_window_size: The size of the sliding window to use to calculate\\n\",\n    \"            average throughput. 
default: None - By default average throughput is\\n\",\n    \"            not calculated\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def _decorator(func: Callable[[I], Optional[O]]) -> Callable[[I], Optional[O]]:\\n\",\n    \"        func_name = f\\\"{func.__module__}.{func.__qualname__}\\\"\\n\",\n    \"\\n\",\n    \"        @wraps(func)\\n\",\n    \"        def wrapper(\\n\",\n    \"            *args: I,\\n\",\n    \"            **kwargs: I,\\n\",\n    \"        ) -> Optional[O]:\\n\",\n    \"            _benchmark(\\n\",\n    \"                interval=interval,\\n\",\n    \"                sliding_window_size=sliding_window_size,\\n\",\n    \"                func_name=func_name,\\n\",\n    \"                benchmark_results=self.benchmark_results,\\n\",\n    \"            )\\n\",\n    \"            return func(*args, **kwargs)\\n\",\n    \"\\n\",\n    \"        @wraps(func)\\n\",\n    \"        async def async_wrapper(\\n\",\n    \"            *args: I,\\n\",\n    \"            **kwargs: I,\\n\",\n    \"        ) -> Optional[O]:\\n\",\n    \"            _benchmark(\\n\",\n    \"                interval=interval,\\n\",\n    \"                sliding_window_size=sliding_window_size,\\n\",\n    \"                func_name=func_name,\\n\",\n    \"                benchmark_results=self.benchmark_results,\\n\",\n    \"            )\\n\",\n    \"            return await func(*args, **kwargs)  # type: ignore\\n\",\n    \"\\n\",\n    \"        if inspect.iscoroutinefunction(func):\\n\",\n    \"            return async_wrapper  # type: ignore\\n\",\n    \"        else:\\n\",\n    \"            return wrapper\\n\",\n    \"\\n\",\n    \"    return _decorator\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5c4fcda2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: 
InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() 
called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Hello I am over after 10k msgs\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Hello I am over after 10k msgs\\n\",\n      \"[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"for executor in [\\\"SequentialExecutor\\\", \\\"DynamicTaskExecutor\\\"]:\\n\",\n    \"\\n\",\n    \"    class TestMsg(BaseModel):\\n\",\n    \"        msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"    app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)))\\n\",\n    \"    # app.benchmark_results[\\\"test\\\"] = dict(count=0)\\n\",\n    \"\\n\",\n    \"    @app.consumes(executor=executor)\\n\",\n    \"    #@app.benchmark(interval=1, sliding_window_size=5)\\n\",\n    \"    async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"        await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"    @app.produces()\\n\",\n    \"    #@app.benchmark(interval=1, sliding_window_size=5)\\n\",\n    \"    async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"        #         print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"        return prediction\\n\",\n    \"\\n\",\n    \"    async with Tester(app) as tester:\\n\",\n    \"        for i in 
range(10_000):\\n\",\n    \"            await tester.to_preprocessed_signals(TestMsg(msg=f\\\"signal {i}\\\"))\\n\",\n    \"        print(\\\"Hello I am over after 10k msgs\\\")\\n\",\n    \"        await asyncio.sleep(5)\\n\",\n    \"        tester.mocks.on_predictions.assert_called()\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d213e250\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def fastapi_lifespan(\\n\",\n    \"    self: FastKafka, kafka_broker_name: str\\n\",\n    \") -> Callable[[\\\"FastAPI\\\"], AsyncIterator[None]]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Method for managing the lifespan of a FastAPI application with a specific Kafka broker.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        kafka_broker_name: The name of the Kafka broker to start FastKafka\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Lifespan function to use for initializing FastAPI\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    @asynccontextmanager\\n\",\n    \"    async def lifespan(fastapi_app: \\\"FastAPI\\\") -> AsyncIterator[None]:\\n\",\n    \"        self.set_kafka_broker(kafka_broker_name=kafka_broker_name)\\n\",\n    \"        async with self:\\n\",\n    \"            yield\\n\",\n    \"\\n\",\n    \"    return lifespan  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3b51932c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"app_for_tester = FastKafka(\\n\",\n    \"    kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)),\\n\",\n    \"    group_id=\\\"app_for_tester_group\\\",\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"\\n\",\n    
\"@app_for_tester.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_app_for_tester_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    print(\\\"receving messages on app_for_tester\\\")\\n\",\n    \"    await to_app_for_tester_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app_for_tester.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_app_for_tester_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(\\\"sending predictions on app_for_tester\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def create_app_for_fastapi(port: int):\\n\",\n    \"    app_for_fastapi = FastKafka(\\n\",\n    \"        kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=port)),\\n\",\n    \"        group_id=\\\"app_for_fastapi_group\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    @app_for_fastapi.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"    async def on_app_for_fastapi_preprocessed_signals(msg: TestMsg):\\n\",\n    \"        print(\\\"receving messages on app_for_fastapi\\\")\\n\",\n    \"        await to_app_for_fastapi_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"    @app_for_fastapi.produces(topic=\\\"predictions\\\")\\n\",\n    \"    async def to_app_for_fastapi_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"        print(\\\"sending predictions on app_for_fastapi\\\")\\n\",\n    \"        return prediction\\n\",\n    \"\\n\",\n    \"    return app_for_fastapi\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7e753c66\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
InMemoryBroker starting\\n\",\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'app_for_tester_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Started server process [8091]\\n\",\n      \"INFO:     Waiting for application startup.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'app_for_fastapi_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Application startup 
complete.\\n\",\n      \"INFO:     Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit)\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     127.0.0.1:35572 - \\\"GET /predict HTTP/1.1\\\" 200 OK\\n\",\n      \"receving messages on app_for_tester\\n\",\n      \"sending predictions on app_for_tester\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Shutting down\\n\",\n      \"INFO:     Waiting for application shutdown.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Application shutdown complete.\\n\",\n      \"INFO:     Finished server process [8091]\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Exception raised e=AssertionError(1)\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def run_uvicorn():\\n\",\n    \"    fastapi_app = FastAPI(lifespan=app_for_fastapi.fastapi_lifespan(\\\"localhost\\\"))\\n\",\n    \"\\n\",\n    \"    @fastapi_app.get(\\\"/predict\\\")\\n\",\n    \"    async def predict():\\n\",\n    \"        return {\\\"result\\\": \\\"hello\\\"}\\n\",\n    \"\\n\",\n    \"    uvicorn.run(\\n\",\n    \"        fastapi_app,\\n\",\n    \"        host=\\\"0.0.0.0\\\",\\n\",\n    \"        port=8000,\\n\",\n    \"        reload=False,\\n\",\n    \"        log_level=\\\"debug\\\",\\n\",\n    \"        workers=1,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(app_for_tester) as tester:\\n\",\n    \"    app_for_fastapi = create_app_for_fastapi(port=9092)\\n\",\n    \"    with run_in_process(run_uvicorn) as p:\\n\",\n    \"        await asyncio.sleep(3)\\n\",\n    \"        res = requests.get(\\\"http://127.0.0.1:8000/predict\\\")\\n\",\n    \"        assert res.ok\\n\",\n    \"\\n\",\n    \"        await tester.to_preprocessed_signals(TestMsg(msg=f\\\"signal 10\\\"))\\n\",\n    \"        await asyncio.sleep(3)\\n\",\n    \"        assert (\\n\",\n    \"            tester.mocks.on_predictions.call_count == 2\\n\",\n    \"        ), tester.mocks.on_predictions.call_count\\n\",\n    \"\\n\",\n    \"    p.close()\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/016_Tester.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"609bc3f5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _application.tester\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b72449ec\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import collections\\n\",\n    \"import inspect\\n\",\n    \"from unittest.mock import AsyncMock, MagicMock\\n\",\n    \"import json\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"from itertools import groupby\\n\",\n    \"from typing import *\\n\",\n    \"from types import ModuleType\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel\\n\",\n    \"\\n\",\n    \"from fastkafka import KafkaEvent\\n\",\n    \"from fastkafka._application.app import FastKafka, AwaitedMock, _get_kafka_brokers\\n\",\n    \"from fastkafka._components.asyncapi import KafkaBroker, KafkaBrokers\\n\",\n    \"from fastkafka._components.helpers import unwrap_list_type\\n\",\n    \"from fastkafka._components.meta import delegates, export, patch\\n\",\n    \"from fastkafka._components.producer_decorator import unwrap_from_kafka_event\\n\",\n    \"from fastkafka._components.aiokafka_consumer_loop import ConsumeCallable\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import ApacheKafkaBroker\\n\",\n    \"from fastkafka._testing.in_memory_broker import InMemoryBroker\\n\",\n    \"from fastkafka._testing.local_redpanda_broker import LocalRedpandaBroker\\n\",\n    \"from fastkafka._components.helpers import remove_suffix\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2ee08fec\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import pytest\\n\",\n    \"from pydantic import Field\\n\",\n    \"\\n\",\n    \"from fastkafka import EventMetadata, 
KafkaEvent\\n\",\n    \"from fastkafka._components.logger import get_logger, suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2d75f36a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"14650b1a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"83484244\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ca915b3e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=\\\"9092\\\")))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"724f7abe\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    
\"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_broker_spec(bootstrap_server: str) -> KafkaBroker:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Helper function to get the broker specification from the bootstrap server URL.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        bootstrap_server: The bootstrap server URL in the format \\\"<host>:<port>\\\".\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A KafkaBroker object representing the broker specification.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    return KafkaBroker(url=url, port=port, description=\\\"\\\", protocol=\\\"\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"476ff67b\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Fastkafka Tester class\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cc14b119\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.testing\\\")\\n\",\n    \"class Tester(FastKafka):\\n\",\n    \"    __test__ = False\\n\",\n    \"\\n\",\n    \"    def __init__(\\n\",\n    \"        self,\\n\",\n    \"        app: Union[FastKafka, List[FastKafka]],\\n\",\n    \"        *,\\n\",\n    \"        use_in_memory_broker: bool = True,\\n\",\n    \"    ):\\n\",\n    \"        \\\"\\\"\\\"Mirror-like object for testing a FastKafka application\\n\",\n    \"\\n\",\n    \"        Can be used as context manager\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            app: The FastKafka application to be tested.\\n\",\n    \"            use_in_memory_broker: Whether to use an in-memory broker for testing or not.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.apps = app if isinstance(app, list) else [app]\\n\",\n    \"\\n\",\n    \"        for app in 
self.apps:\\n\",\n    \"            app.create_mocks()\\n\",\n    \"\\n\",\n    \"        super().__init__()\\n\",\n    \"        self.mirrors: Dict[Any, Any] = {}\\n\",\n    \"        self._kafka_brokers = self.apps[0]._kafka_brokers\\n\",\n    \"        self._kafka_config[\\\"bootstrap_servers_id\\\"] = self.apps[0]._kafka_config[\\n\",\n    \"            \\\"bootstrap_servers_id\\\"\\n\",\n    \"        ]\\n\",\n    \"        self._create_mirrors()\\n\",\n    \"        self.use_in_memory_broker = use_in_memory_broker\\n\",\n    \"\\n\",\n    \"    async def _start_tester(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Starts the Tester\\\"\\\"\\\"\\n\",\n    \"        for app in self.apps:\\n\",\n    \"            await app.__aenter__()\\n\",\n    \"        self.create_mocks()\\n\",\n    \"        self._arrange_mirrors()\\n\",\n    \"        await super().__aenter__()\\n\",\n    \"        await asyncio.sleep(3)\\n\",\n    \"\\n\",\n    \"    async def _stop_tester(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"Shuts down the Tester\\\"\\\"\\\"\\n\",\n    \"        await super().__aexit__(None, None, None)\\n\",\n    \"        for app in self.apps[::-1]:\\n\",\n    \"            await app.__aexit__(None, None, None)\\n\",\n    \"\\n\",\n    \"    def _create_mirrors(self) -> None:\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    def _arrange_mirrors(self) -> None:\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"    def _set_arguments_and_return_old(self,\\n\",\n    \"                      bootstrap_servers_id: Optional[str],\\n\",\n    \"                      use_in_memory_broker: bool\\n\",\n    \"                      ) -> Dict[Any, Any]:\\n\",\n    \"        initial_arguments: Dict[Any, Any] = dict()\\n\",\n    \"        initial_arguments[\\\"use_in_memory_broker\\\"] = self.use_in_memory_broker\\n\",\n    \"        self.use_in_memory_broker = use_in_memory_broker\\n\",\n    \"        \\n\",\n    \"        
initial_arguments[\\\"bootstrap_servers_id\\\"] = self._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"        if bootstrap_servers_id is None:\\n\",\n    \"            bootstrap_servers_id = self._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"        else:\\n\",\n    \"            self._kafka_config[\\\"bootstrap_servers_id\\\"] = bootstrap_servers_id\\n\",\n    \"        \\n\",\n    \"        for app in self.apps:\\n\",\n    \"            initial_arguments[app] = app._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"            app._kafka_config[\\\"bootstrap_servers_id\\\"] = bootstrap_servers_id\\n\",\n    \"            \\n\",\n    \"        return initial_arguments\\n\",\n    \"    \\n\",\n    \"    def _restore_initial_arguments(self,\\n\",\n    \"                                  initial_arguments: Dict[Any, Any]\\n\",\n    \"                                  ) -> None:\\n\",\n    \"        self.use_in_memory_broker = initial_arguments[\\\"use_in_memory_broker\\\"]\\n\",\n    \"        self._kafka_config[\\\"bootstrap_servers_id\\\"] = initial_arguments[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"        \\n\",\n    \"        for app in self.apps:\\n\",\n    \"            app._kafka_config[\\\"bootstrap_servers_id\\\"] = initial_arguments[app]\\n\",\n    \"\\n\",\n    \"    @asynccontextmanager\\n\",\n    \"    async def using_external_broker(self,\\n\",\n    \"                                    bootstrap_servers_id: Optional[str] = None,\\n\",\n    \"                     ) -> AsyncGenerator[\\\"Tester\\\", None]:\\n\",\n    \"        \\\"\\\"\\\"Tester context manager for using external broker \\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            bootstrap_servers_id: The bootstrap server of aplications.\\n\",\n    \"            \\n\",\n    \"        Returns:\\n\",\n    \"            self or None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        initial_arguments = 
self._set_arguments_and_return_old(bootstrap_servers_id, use_in_memory_broker=False)\\n\",\n    \"            \\n\",\n    \"        async with self._create_ctx() as ctx:\\n\",\n    \"            try:\\n\",\n    \"                yield self\\n\",\n    \"            finally:\\n\",\n    \"                self._restore_initial_arguments(initial_arguments)\\n\",\n    \"                \\n\",\n    \"                \\n\",\n    \"    @asynccontextmanager\\n\",\n    \"    async def using_inmemory_broker(self, \\n\",\n    \"                      bootstrap_servers_id: Optional[str] = None,\\n\",\n    \"                     ) -> AsyncGenerator[\\\"Tester\\\", None]:\\n\",\n    \"        \\\"\\\"\\\"Tester context manager for using in-memory broker \\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            bootstrap_servers_id: The bootstrap server of aplications.\\n\",\n    \"            \\n\",\n    \"        Returns:\\n\",\n    \"            self or None\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        initial_arguments = self._set_arguments_and_return_old(bootstrap_servers_id, use_in_memory_broker=True)\\n\",\n    \"            \\n\",\n    \"        async with self._create_ctx() as ctx:\\n\",\n    \"            try:\\n\",\n    \"                yield self\\n\",\n    \"            finally:\\n\",\n    \"                self._restore_initial_arguments(initial_arguments)\\n\",\n    \"            \\n\",\n    \"            \\n\",\n    \"    @asynccontextmanager\\n\",\n    \"    async def _create_ctx(self) -> AsyncGenerator[\\\"Tester\\\", None]:\\n\",\n    \"        if self.use_in_memory_broker == True:\\n\",\n    \"            with InMemoryBroker(): # type: ignore\\n\",\n    \"                await self._start_tester()\\n\",\n    \"                try:\\n\",\n    \"                    yield self\\n\",\n    \"                finally:\\n\",\n    \"                    await self._stop_tester()\\n\",\n    \"        else:\\n\",\n    \"            await 
self._start_tester()\\n\",\n    \"            try:\\n\",\n    \"                yield self\\n\",\n    \"            finally:\\n\",\n    \"                await self._stop_tester()\\n\",\n    \"\\n\",\n    \"    async def __aenter__(self) -> \\\"Tester\\\":\\n\",\n    \"        self._ctx = self._create_ctx()\\n\",\n    \"        return await self._ctx.__aenter__()\\n\",\n    \"\\n\",\n    \"    async def __aexit__(self, *args: Any) -> None:\\n\",\n    \"        await self._ctx.__aexit__(*args)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"63af0257\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n 
     \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"<ExceptionInfo RuntimeError('ok') tblen=1>\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"<ExceptionInfo RuntimeError('ok') tblen=1>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"for _ in range(2):\\n\",\n    \"    with pytest.raises(RuntimeError) as e:\\n\",\n    \"        async with Tester(app) as tester:\\n\",\n    \"            assert tester.is_started\\n\",\n    \"            raise RuntimeError(\\\"ok\\\")\\n\",\n    \"\\n\",\n    \"    print(e)\\n\",\n    \"    assert not tester.is_started\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4de0c72a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'latest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Producing msg msg='signal'\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"tester = Tester(app)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@tester.produces()\\n\",\n    \"async def to_preprocessed_signals(msg: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Producing msg {msg}\\\")\\n\",\n    \"    return msg\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"tester.to_preprocessed_signals = to_preprocessed_signals\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@tester.consumes(auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_predictions(msg: TestMsg):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await asyncio.sleep(5)\\n\",\n    \"    tester.mocks.on_predictions.assert_called()\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"938e5f89\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Test multiple brokers\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a0ce0e2e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] 
fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['server_1:9092']}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer 
patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Producing msg msg='signal'\\n\",\n      \"Defined broker:  msg=TestMsg(msg='signal')\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers_1 = dict(localhost=[dict(url=\\\"server_1\\\", port=\\\"9092\\\")])\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"server_2\\\", port=\\\"9092\\\"))\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_preprocessed_signals_1(msg: TestMsg):\\n\",\n    \"    print(f\\\"Default broker:  {msg=}\\\")\\n\",\n    \"    \\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def on_preprocessed_signals_2(msg: TestMsg):\\n\",\n    \"    print(f\\\"Defined broker:  {msg=}\\\")\\n\",\n    \"\\n\",\n    \"tester = Tester(app)\\n\",\n    
\"\\n\",\n    \"\\n\",\n    \"@tester.produces(brokers=kafka_brokers_2)\\n\",\n    \"async def to_preprocessed_signals(msg: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Producing msg {msg}\\\")\\n\",\n    \"    return msg\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"tester.to_preprocessed_signals = to_preprocessed_signals\\n\",\n    \"\\n\",\n    \"async with tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await asyncio.sleep(5)\\n\",\n    \"    await app.awaited_mocks.on_preprocessed_signals_2.assert_called(\\n\",\n    \"        timeout=5\\n\",\n    \"    )\\n\",\n    \"    await app.awaited_mocks.on_preprocessed_signals_1.assert_not_called(\\n\",\n    \"        timeout=5\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7b69daca\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['server_2:9092']}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following 
parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['server_2:9092']}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Producing msg msg='signal'\\n\",\n      \"msg=TestMsg(msg='signal')\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"tester: msg=TestMsg(msg='prediction')\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers_1 = dict(localhost=dict(url=\\\"server_1\\\", port=\\\"9092\\\"))\\n\",\n    \"kafka_brokers_2 = dict(localhost=[dict(url=\\\"server_2\\\", port=\\\"9092\\\")])\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    print(f\\\"{msg=}\\\")\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"tester = Tester(app)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@tester.produces(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def to_preprocessed_signals(msg: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Producing msg {msg}\\\")\\n\",\n    \"    return msg\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@tester.consumes(auto_offset_reset=\\\"earliest\\\", brokers=kafka_brokers_1)\\n\",\n    \"async def 
on_predictions(msg: TestMsg):\\n\",\n    \"    print(f\\\"tester: {msg=}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"tester.to_preprocessed_signals = to_preprocessed_signals\\n\",\n    \"\\n\",\n    \"async with tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await asyncio.sleep(5)\\n\",\n    \"    await app.awaited_mocks.on_preprocessed_signals.assert_called(timeout=5)\\n\",\n    \"    await tester.awaited_mocks.on_predictions.assert_called(timeout=5)\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3ea8c473\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Mirroring\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c20a5a41\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def mirror_producer(\\n\",\n    \"    topic: str, producer_f: Callable[..., Any], brokers: str, app: FastKafka\\n\",\n    \") -> Callable[..., Any]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decorator to create a mirrored producer function.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The topic to produce to.\\n\",\n    \"        producer_f: The original producer function.\\n\",\n    \"        brokers: The brokers configuration.\\n\",\n    \"        app: The FastKafka application.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The mirrored producer function.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    msg_type = inspect.signature(producer_f).return_annotation\\n\",\n    \"\\n\",\n    \"    msg_type_unwrapped = unwrap_list_type(unwrap_from_kafka_event(msg_type))\\n\",\n    \"\\n\",\n    \"    async def skeleton_func(msg: BaseModel) -> None:\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    mirror_func = skeleton_func\\n\",\n    \"    sig = inspect.signature(skeleton_func)\\n\",\n    \"\\n\",\n    \"    # 
adjust name, take into consideration the origin app and brokers\\n\",\n    \"    # configuration so that we can differentiate those two\\n\",\n    \"    mirror_func.__name__ = f\\\"mirror_{id(app)}_on_{remove_suffix(topic).replace('.', '_').replace('-', '_')}_{abs(hash(brokers))}\\\"\\n\",\n    \"\\n\",\n    \"    # adjust arg and return val\\n\",\n    \"    sig = sig.replace(\\n\",\n    \"        parameters=[\\n\",\n    \"            inspect.Parameter(\\n\",\n    \"                name=\\\"msg\\\",\\n\",\n    \"                annotation=msg_type_unwrapped,\\n\",\n    \"                kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,\\n\",\n    \"            )\\n\",\n    \"        ]\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    mirror_func.__signature__ = sig  # type: ignore\\n\",\n    \"\\n\",\n    \"    return mirror_func\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e006e6d7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_topic1() -> TestMsg:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"topic2\\\")\\n\",\n    \"async def some_log(in_var: int) -> TestMsg:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"topic2\\\", brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9093)))\\n\",\n    \"async def some_log_1(in_var: int) -> TestMsg:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"topic2\\\", brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9093)))\\n\",\n    \"async def some_log_2(in_var: int) -> TestMsg:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"for topic, (producer_f, _, brokers, _) in app._producers_store.items():\\n\",\n    \"    mirror = mirror_producer(\\n\",\n    \"        
topic,\\n\",\n    \"        producer_f,\\n\",\n    \"        brokers.model_dump_json() if brokers is not None else app._kafka_brokers.model_dump_json(),\\n\",\n    \"        app,\\n\",\n    \"    )\\n\",\n    \"    assert \\\"_\\\".join(mirror.__name__.split(\\\"_\\\")[2:-1]) == \\\"on_\\\" + remove_suffix(topic)\\n\",\n    \"    assert (\\n\",\n    \"        inspect.signature(mirror).parameters[\\\"msg\\\"].annotation.__name__\\n\",\n    \"        == inspect.Parameter(\\n\",\n    \"            name=\\\"msg\\\",\\n\",\n    \"            annotation=TestMsg,\\n\",\n    \"            kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,\\n\",\n    \"        ).annotation.__name__\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6c20acfa\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)))\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_topic1() -> TestMsg:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"topic2\\\")\\n\",\n    \"async def some_log(in_var: int) -> KafkaEvent[List[TestMsg]]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"for topic, (producer_f, _, brokers, _) in app._producers_store.items():\\n\",\n    \"    mirror = mirror_producer(\\n\",\n    \"        topic,\\n\",\n    \"        producer_f,\\n\",\n    \"        brokers.model_dump_json() if brokers is not None else app._kafka_brokers.model_dump_json(),\\n\",\n    \"        app\\n\",\n    \"    )\\n\",\n    \"    assert \\\"_\\\".join(mirror.__name__.split(\\\"_\\\")[2:-1]) == \\\"on_\\\" + remove_suffix(topic)\\n\",\n    \"    assert inspect.signature(mirror).parameters[\\\"msg\\\"].annotation.__name__ == \\\"TestMsg\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e96d23ac\",\n   \"metadata\": {},\n   \"outputs\": [],\n   
\"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def mirror_consumer(\\n\",\n    \"    topic: str, consumer_f: Callable[..., Any], brokers: str, app: FastKafka\\n\",\n    \") -> Callable[[BaseModel], Coroutine[Any, Any, BaseModel]]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decorator to create a mirrored consumer function.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The topic to consume from.\\n\",\n    \"        consumer_f: The original consumer function.\\n\",\n    \"        brokers: The brokers configuration.\\n\",\n    \"        app: The FastKafka application.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The mirrored consumer function.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    msg_type = inspect.signature(consumer_f).parameters[\\\"msg\\\"]\\n\",\n    \"\\n\",\n    \"    msg_type_unwrapped = unwrap_list_type(msg_type)\\n\",\n    \"\\n\",\n    \"    async def skeleton_func(msg: BaseModel) -> BaseModel:\\n\",\n    \"        return msg\\n\",\n    \"\\n\",\n    \"    mirror_func = skeleton_func\\n\",\n    \"    sig = inspect.signature(skeleton_func)\\n\",\n    \"\\n\",\n    \"    # adjust name, take into consideration the origin app and brokers\\n\",\n    \"    # configuration so that we can differentiate those two\\n\",\n    \"    mirror_func.__name__ = f\\\"mirror_{id(app)}_to_{remove_suffix(topic).replace('.', '_').replace('-', '_')}_{abs(hash(brokers))}\\\"\\n\",\n    \"\\n\",\n    \"    # adjust arg and return val\\n\",\n    \"    sig = sig.replace(\\n\",\n    \"        parameters=[msg_type], return_annotation=msg_type_unwrapped.annotation\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    mirror_func.__signature__ = sig  # type: ignore\\n\",\n    \"    return mirror_func\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a79f408f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"for topic, (consumer_f, _, _, brokers, _) in 
app._consumers_store.items():\\n\",\n    \"    mirror = mirror_consumer(\\n\",\n    \"        topic,\\n\",\n    \"        consumer_f,\\n\",\n    \"        brokers.model_dump_json() if brokers is not None else app._kafka_brokers.model_dump_json(),\\n\",\n    \"        app\\n\",\n    \"    )\\n\",\n    \"    assert \\\"_\\\".join(mirror.__name__.split(\\\"_\\\")[3:-1]) == \\\"to_\\\" + remove_suffix(topic)\\n\",\n    \"    assert (\\n\",\n    \"        inspect.signature(mirror).return_annotation.__name__ == TestMsg.__name__\\n\",\n    \"    ), inspect.signature(mirror).return_annotation.__name__\\n\",\n    \"    assert (\\n\",\n    \"        inspect.signature(mirror).parameters[\\\"msg\\\"].annotation.__name__\\n\",\n    \"        == inspect.Parameter(\\n\",\n    \"            name=\\\"msg\\\",\\n\",\n    \"            annotation=TestMsg,\\n\",\n    \"            kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,\\n\",\n    \"        ).annotation.__name__\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1737f993\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def _create_mirrors(self: Tester) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Creates mirror functions for producers and consumers.\\n\",\n    \"\\n\",\n    \"    Iterates over the FastKafka application and its producers and consumers. For each consumer, it creates a mirror\\n\",\n    \"    consumer function using the `mirror_consumer` decorator. For each producer, it creates a mirror producer function\\n\",\n    \"    using the `mirror_producer` decorator. 
The mirror functions are stored in the `self.mirrors` dictionary and also\\n\",\n    \"    set as attributes on the Tester instance.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    for app in self.apps:\\n\",\n    \"        for topic, (consumer_f, _, _, brokers, _) in app._consumers_store.items():\\n\",\n    \"            mirror_f = mirror_consumer(\\n\",\n    \"                topic,\\n\",\n    \"                consumer_f,\\n\",\n    \"                brokers.model_dump_json() if brokers is not None else app._kafka_brokers.model_dump_json(),\\n\",\n    \"                app,\\n\",\n    \"            )\\n\",\n    \"            mirror_f = self.produces(  # type: ignore\\n\",\n    \"                topic=remove_suffix(topic),\\n\",\n    \"                brokers=brokers,\\n\",\n    \"            )(mirror_f)\\n\",\n    \"            self.mirrors[consumer_f] = mirror_f\\n\",\n    \"            setattr(self, mirror_f.__name__, mirror_f)\\n\",\n    \"        for topic, (producer_f, _, brokers, _) in app._producers_store.items():\\n\",\n    \"            mirror_f = mirror_producer(\\n\",\n    \"                topic,\\n\",\n    \"                producer_f,\\n\",\n    \"                brokers.model_dump_json() if brokers is not None else app._kafka_brokers.model_dump_json(),\\n\",\n    \"                app,\\n\",\n    \"            )\\n\",\n    \"            mirror_f = self.consumes(\\n\",\n    \"                topic=remove_suffix(topic),\\n\",\n    \"                brokers=brokers,\\n\",\n    \"            )(\\n\",\n    \"                mirror_f  # type: ignore\\n\",\n    \"            )\\n\",\n    \"            self.mirrors[producer_f] = mirror_f\\n\",\n    \"            setattr(self, mirror_f.__name__, mirror_f)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"af0ccdda\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"kafka_brokers_1 = 
dict(localhost=[dict(url=\\\"server_1\\\", port=9092)])\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"server_2\\\", port=9092))\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    print(f\\\"{msg=}\\\")\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"tester = Tester(app)\\n\",\n    \"\\n\",\n    \"assert hasattr(\\n\",\n    \"    tester,\\n\",\n    \"    f\\\"mirror_{id(app)}_to_preprocessed_signals_{abs(hash(_get_kafka_brokers(kafka_brokers_2).model_dump_json()))}\\\",\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"assert hasattr(\\n\",\n    \"    tester,\\n\",\n    \"    f\\\"mirror_{id(app)}_on_predictions_{abs(hash(_get_kafka_brokers(app._kafka_brokers).model_dump_json()))}\\\",\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"86bd35dc\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['server_1:9092']}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created 
producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['server_1:9092']}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      
\"msg=TestMsg(msg='signal')\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers_1 = dict(localhost=[dict(url=\\\"server_1\\\", port=9092)])\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"server_2\\\", port=9092))\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    print(f\\\"{msg=}\\\")\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    await getattr(\\n\",\n    \"        tester,\\n\",\n    \"        
f\\\"mirror_{id(app)}_to_preprocessed_signals_{abs(hash(_get_kafka_brokers(kafka_brokers_2).model_dump_json()))}\\\",\\n\",\n    \"    )(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await asyncio.sleep(5)\\n\",\n    \"    await app.awaited_mocks.on_preprocessed_signals.assert_called(timeout=5)\\n\",\n    \"    await getattr(\\n\",\n    \"        tester.awaited_mocks,\\n\",\n    \"        f\\\"mirror_{id(app)}_on_predictions_{abs(hash(_get_kafka_brokers(kafka_brokers_1).model_dump_json()))}\\\",\\n\",\n    \"    ).assert_called(timeout=5)\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"7af2c251\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Mirrors dict and syntax sugar\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6d5a5984\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class AmbiguousWarning:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Warning class used for ambiguous topics.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The ambiguous topic.\\n\",\n    \"        functions: List of function names associated with the ambiguous topic.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    def __init__(self, topic: str, functions: List[str]):\\n\",\n    \"        self.topic = topic\\n\",\n    \"        self.functions = functions\\n\",\n    \"\\n\",\n    \"    def __getattribute__(self, attr: str) -> Any:\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            f\\\"Ambiguous topic: {super().__getattribute__('topic')}, for functions: {super().__getattribute__('functions')}\\\\nUse Tester.mirrors[app.function] to resolve ambiguity\\\"\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    def __call__(self, *args: Any, **kwargs: Any) -> Any:\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            f\\\"Ambiguous topic: {self.topic}, for functions: 
{self.functions}\\\\nUse Tester.mirrors[app.function] to resolve ambiguity\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f86a0f07\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with pytest.raises(Exception) as e:\\n\",\n    \"    AmbiguousWarning(topic=\\\"some_topic\\\", functions=[\\\"some_functions\\\"])(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    \\n\",\n    \"assert e.value.args[0] == \\\"Ambiguous topic: some_topic, for functions: ['some_functions']\\\\nUse Tester.mirrors[app.function] to resolve ambiguity\\\"\\n\",\n    \"\\n\",\n    \"with pytest.raises(Exception) as e:\\n\",\n    \"    AmbiguousWarning(topic=\\\"some_topic\\\", functions=[\\\"some_brokers\\\"]).assert_called(timeout=5)\\n\",\n    \"    \\n\",\n    \"assert e.value.args[0] == \\\"Ambiguous topic: some_topic, for functions: ['some_brokers']\\\\nUse Tester.mirrors[app.function] to resolve ambiguity\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4a841f55\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def set_sugar(\\n\",\n    \"    *,\\n\",\n    \"    tester: Tester,\\n\",\n    \"    prefix: str,\\n\",\n    \"    topic_brokers: Dict[str, Tuple[List[str], List[str]]],\\n\",\n    \"    topic: str,\\n\",\n    \"    brokers: str,\\n\",\n    \"    origin_function_name: str,\\n\",\n    \"    function: Callable[..., Union[Any, Awaitable[Any]]],\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Sets the sugar function for a topic.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        tester: The Tester instance.\\n\",\n    \"        prefix: The prefix to use for the sugar function (e.g., \\\"to_\\\" or \\\"on_\\\").\\n\",\n    \"        topic_brokers: Dictionary to store the brokers and functions associated with each topic.\\n\",\n    \"        topic: The 
topic name.\\n\",\n    \"        brokers: The brokers configuration.\\n\",\n    \"        origin_function_name: The name of the original function.\\n\",\n    \"        function: The mirror function to be set as the sugar function.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    brokers_for_topic, functions_for_topic = topic_brokers.get(topic, ([], []))\\n\",\n    \"    if brokers not in brokers_for_topic:\\n\",\n    \"        brokers_for_topic.append(brokers)\\n\",\n    \"        functions_for_topic.append(origin_function_name)\\n\",\n    \"        topic_brokers[topic] = (brokers_for_topic, functions_for_topic)\\n\",\n    \"    if len(brokers_for_topic) == 1:\\n\",\n    \"        setattr(tester, f\\\"{prefix}{topic}\\\", function)\\n\",\n    \"    else:\\n\",\n    \"        setattr(\\n\",\n    \"            tester, f\\\"{prefix}{topic}\\\", AmbiguousWarning(topic, functions_for_topic)\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4d8480ce\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def _arrange_mirrors(self: Tester) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Arranges the mirror functions.\\n\",\n    \"\\n\",\n    \"    Iterates over the FastKafka application and its producers and consumers. For each consumer, it retrieves the mirror\\n\",\n    \"    function from the `self.mirrors` dictionary and sets it as an attribute on the Tester instance. It also sets the\\n\",\n    \"    sugar function using the `set_sugar` function. For each producer, it retrieves the mirror function and sets it as\\n\",\n    \"    an attribute on the Tester instance. It also sets the sugar function for the awaited mocks. 
Finally, it creates the\\n\",\n    \"    `mocks` and `awaited_mocks` namedtuples and sets them as attributes on the Tester instance.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    topic_brokers: Dict[str, Tuple[List[str], List[str]]] = {}\\n\",\n    \"    mocks = {}\\n\",\n    \"    awaited_mocks = {}\\n\",\n    \"    for app in self.apps:\\n\",\n    \"        for topic, (consumer_f, _, _, brokers, _) in app._consumers_store.items():\\n\",\n    \"            mirror_f = self.mirrors[consumer_f]\\n\",\n    \"            self.mirrors[getattr(app, consumer_f.__name__)] = mirror_f\\n\",\n    \"            set_sugar(\\n\",\n    \"                tester=self,\\n\",\n    \"                prefix=\\\"to_\\\",\\n\",\n    \"                topic_brokers=topic_brokers,\\n\",\n    \"                topic=remove_suffix(topic).replace(\\\".\\\", \\\"_\\\").replace(\\\"-\\\", \\\"_\\\"),\\n\",\n    \"                brokers=brokers.model_dump_json()\\n\",\n    \"                if brokers is not None\\n\",\n    \"                else app._kafka_brokers.model_dump_json(),\\n\",\n    \"                origin_function_name=consumer_f.__name__,\\n\",\n    \"                function=mirror_f,\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"            mocks[\\n\",\n    \"                f\\\"to_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\\\"\\n\",\n    \"            ] = getattr(self.mocks, mirror_f.__name__)\\n\",\n    \"            awaited_mocks[\\n\",\n    \"                f\\\"to_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\\\"\\n\",\n    \"            ] = getattr(self.awaited_mocks, mirror_f.__name__)\\n\",\n    \"\\n\",\n    \"        for topic, (producer_f, _, brokers, _) in app._producers_store.items():\\n\",\n    \"            mirror_f = self.mirrors[producer_f]\\n\",\n    \"            self.mirrors[getattr(app, producer_f.__name__)] = getattr(\\n\",\n    \"                
self.awaited_mocks, mirror_f.__name__\\n\",\n    \"            )\\n\",\n    \"            set_sugar(\\n\",\n    \"                tester=self,\\n\",\n    \"                prefix=\\\"on_\\\",\\n\",\n    \"                topic_brokers=topic_brokers,\\n\",\n    \"                topic=remove_suffix(topic).replace(\\\".\\\", \\\"_\\\").replace(\\\"-\\\", \\\"_\\\"),\\n\",\n    \"                brokers=brokers.model_dump_json()\\n\",\n    \"                if brokers is not None\\n\",\n    \"                else app._kafka_brokers.model_dump_json(),\\n\",\n    \"                origin_function_name=producer_f.__name__,\\n\",\n    \"                function=getattr(self.awaited_mocks, mirror_f.__name__),\\n\",\n    \"            )\\n\",\n    \"            mocks[\\n\",\n    \"                f\\\"on_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\\\"\\n\",\n    \"            ] = getattr(self.mocks, mirror_f.__name__)\\n\",\n    \"            awaited_mocks[\\n\",\n    \"                f\\\"on_{remove_suffix(topic).replace('.', '_').replace('-', '_')}\\\"\\n\",\n    \"            ] = getattr(self.awaited_mocks, mirror_f.__name__)\\n\",\n    \"\\n\",\n    \"    AppMocks = collections.namedtuple(  # type: ignore\\n\",\n    \"        f\\\"{self.__class__.__name__}Mocks\\\", [f_name for f_name in mocks]\\n\",\n    \"    )\\n\",\n    \"    setattr(self, \\\"mocks\\\", AppMocks(**mocks))\\n\",\n    \"    setattr(self, \\\"awaited_mocks\\\", AppMocks(**awaited_mocks))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f0cb8429\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: 
_create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['store_product']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test mock calls immutability\\n\",\n    \"\\n\",\n    \"class Currency(BaseModel):\\n\",\n    \"    currency: str = Field(..., description=\\\"Currency\\\")\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"server_2\\\", port=9092)))\\n\",\n    \"\\n\",\n    \"@app.consumes(prefix=\\\"on\\\", 
topic=\\\"store_product\\\")\\n\",\n    \"async def on_store_product(msg: Currency):\\n\",\n    \"    msg.currency = \\\"EUR\\\"\\n\",\n    \"    \\n\",\n    \"async with Tester(app).using_inmemory_broker() as tester:\\n\",\n    \"    await tester.to_store_product(Currency(currency=\\\"HRK\\\"))\\n\",\n    \"    await app.awaited_mocks.on_store_product.assert_called_with(\\n\",\n    \"        Currency(currency=\\\"HRK\\\"),\\n\",\n    \"        timeout=5\\n\",\n    \"    )\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"00d84884\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test batch mirroring\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"second_app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@second_app.consumes()\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg, meta: EventMetadata):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@second_app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> List[TestMsg]:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return [prediction]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(second_app) as tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester.awaited_mocks.on_predictions.assert_called(timeout=5)\\n\",\n    \"    tester.mocks.on_predictions.assert_called()\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5b59f70e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'server_1:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"msg=TestMsg(msg='signal')\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[WARNING] fastkafka._components.task_streaming: e=AttributeError(\\\"'NoneType' object has no attribute 'create_batch'\\\")\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers_1 = dict(localhost=dict(url=\\\"server_1\\\", port=9092))\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"server_2\\\", port=9092))\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_preprocessed_signals_1(msg: TestMsg):\\n\",\n    \"    print(f\\\"{msg=}\\\")\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    
\"async def on_preprocessed_signals_2(msg: TestMsg):\\n\",\n    \"    print(f\\\"{msg=}\\\")\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    with pytest.raises(Exception) as exception_produce:\\n\",\n    \"        await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    assert (\\n\",\n    \"        exception_produce.value.args[0]\\n\",\n    \"        == \\\"Ambiguous topic: preprocessed_signals, for functions: ['on_preprocessed_signals_1', 'on_preprocessed_signals_2']\\\\nUse Tester.mirrors[app.function] to resolve ambiguity\\\"\\n\",\n    \"    )\\n\",\n    \"    await tester.mirrors[on_preprocessed_signals_1](TestMsg(msg=\\\"signal\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8ed56429\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['server_2:9092']}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['server_2:9092']}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: 
{'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['server_2:9092']}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed-signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['server_2:9092']}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer 
patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test topics with hyphens\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed-signals\\\")\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"    await tester.awaited_mocks.on_predictions.assert_called_with(\\n\",\n    \"        TestMsg(msg=\\\"prediction\\\"),\\n\",\n    \"        timeout=60,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"63606cfb\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] 
fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['server_2:9092']}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['server_2:9092']}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"kafka_brokers_1 = dict(localhost=[dict(url=\\\"server_1\\\", port=9092)])\\n\",\n    \"kafka_brokers_1 = dict(localhost=[dict(url=\\\"server_2\\\", port=9092)])\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions_1(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def to_predictions_2(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return 
prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    with pytest.raises(Exception) as exception_consume:\\n\",\n    \"        await tester.on_predictions.assert_called(timeout=5)\\n\",\n    \"    assert (\\n\",\n    \"        exception_consume.value.args[0]\\n\",\n    \"        == \\\"Ambiguous topic: predictions, for functions: ['to_predictions_1', 'to_predictions_2']\\\\nUse Tester.mirrors[app.function] to resolve ambiguity\\\"\\n\",\n    \"    )\\n\",\n    \"    await tester.mirrors[app.to_predictions_1].assert_not_called(timeout=5)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c10ea363\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n   
   \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Test KafkaEvent mirroring and consumer batching\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"second_app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@second_app.consumes()\\n\",\n    \"async def on_preprocessed_signals(msg: List[TestMsg]):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@second_app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> KafkaEvent[TestMsg]:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return KafkaEvent(message=prediction, key=b\\\"123\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(second_app) as tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester.on_predictions.assert_called(timeout=5)\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f237621b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the 
config: '{'bootstrap_servers': ['server_2:9092']}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['server_2:9092']}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Initiate tester with two apps\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"second_app = FastKafka(kafka_brokers=dict(localhost=dict(url=\\\"server_2\\\", port=9092)))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@second_app.consumes()\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@second_app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester([app, second_app]) as tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester.mirrors[second_app.to_predictions].assert_called(timeout=5)\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2aef439e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"kafka_brokers_1 = dict(localhost=dict(url=\\\"server\\\", port=9092), production=dict(url=\\\"prod_server\\\", port=9092))\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"broker_2_server\\\", port=9092), production=dict(url=\\\"broker_2_prod_server\\\", port=9092))\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    
\"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    await to_predictions(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def to_predictions_2(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return prediction\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e524f7c6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'broker_2_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      
\"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 
'bootstrap_servers': 'broker_2_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'prod_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'broker_2_prod_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'prod_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'prod_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'prod_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'broker_2_prod_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() 
finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"tester = Tester([app])\\n\",\n    \"\\n\",\n    \"with InMemoryBroker() as broker:\\n\",\n    \"    async with tester.using_external_broker() as tester1:\\n\",\n    \"        assert tester1.use_in_memory_broker == False\\n\",\n    \"        bootstraps_topics_groups = list(broker.topic_groups.keys())\\n\",\n    \"        # kafka_brokers1 localhost\\n\",\n    \"        assert bootstraps_topics_groups[0][0] == \\\"server:9092\\\"\\n\",\n    \"        assert bootstraps_topics_groups[1][0] == \\\"server:9092\\\"\\n\",\n    \"        # kafka_brokers2 localhost\\n\",\n    \"        assert bootstraps_topics_groups[2][0] == \\\"broker_2_server:9092\\\"\\n\",\n    \"        assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == 'localhost'\\n\",\n    \"        assert tester1._kafka_config[\\\"bootstrap_servers_id\\\"] == 
app._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"\\n\",\n    \"assert tester.use_in_memory_broker == True\\n\",\n    \"        \\n\",\n    \"with InMemoryBroker() as broker:   \\n\",\n    \"    async with tester.using_external_broker(bootstrap_servers_id=\\\"production\\\") as tester2:\\n\",\n    \"        assert tester2.use_in_memory_broker == False\\n\",\n    \"        bootstraps_topics_groups = list(broker.topic_groups.keys())\\n\",\n    \"        # kafka_brokers1 production\\n\",\n    \"        assert bootstraps_topics_groups[0][0] == \\\"prod_server:9092\\\"\\n\",\n    \"        assert bootstraps_topics_groups[1][0] == \\\"prod_server:9092\\\"\\n\",\n    \"        # kafka_brokers2 production\\n\",\n    \"        assert bootstraps_topics_groups[2][0] == \\\"broker_2_prod_server:9092\\\"\\n\",\n    \"        \\n\",\n    \"        assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == 'production'\\n\",\n    \"        assert tester2._kafka_config[\\\"bootstrap_servers_id\\\"] == app._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"        \\n\",\n    \"assert tester.use_in_memory_broker == True\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fba16412\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'broker_2_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'broker_2_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'prod_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'broker_2_prod_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'prod_server:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'prod_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'prod_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'broker_2_prod_server:9092'}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Sending prediction: msg='prediction'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"tester = Tester([app])\\n\",\n    \"\\n\",\n    \"async with tester.using_inmemory_broker() as tester1:\\n\",\n    \"    assert tester1.use_in_memory_broker == True\\n\",\n    \"    await tester1.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \" 
   await tester1.mirrors[app.to_predictions].assert_called(timeout=5)\\n\",\n    \"\\n\",\n    \"    assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == 'localhost'\\n\",\n    \"    assert tester1._kafka_config[\\\"bootstrap_servers_id\\\"] == app._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"\\n\",\n    \"assert tester.use_in_memory_broker == True\\n\",\n    \"\\n\",\n    \"async with tester.using_inmemory_broker(bootstrap_servers_id=\\\"production\\\") as tester2:\\n\",\n    \"    assert tester2.use_in_memory_broker == True\\n\",\n    \"    await tester2.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester2.mirrors[app.to_predictions].assert_called(timeout=5)\\n\",\n    \"\\n\",\n    \"    assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == 'production'\\n\",\n    \"    assert tester2._kafka_config[\\\"bootstrap_servers_id\\\"] == app._kafka_config[\\\"bootstrap_servers_id\\\"]\\n\",\n    \"    \\n\",\n    \"assert tester.use_in_memory_broker == True\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/017_Benchmarking.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b2a39e99\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.benchmarking\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a6aec7a7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"from collections import deque\\n\",\n    \"from datetime import datetime, timedelta\\n\",\n    \"from statistics import mean\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import get_logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d9f3e46b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"acb8be86\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"suppress_timestamps()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"79f00686\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(\\\"fastkafka.benchmark\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"518b9b49\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _benchmark(\\n\",\n    \"    interval: Union[int, timedelta] = 1,\\n\",\n    \"    *,\\n\",\n    \"    sliding_window_size: Optional[int] = None,\\n\",\n    \"    func_name: str,\\n\",\n    \"    benchmark_results: Dict[str, Dict[str, Any]],\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"Used to record the benchmark results(throughput, average throughput, standard deviation) of a given 
function\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        interval: the time interval after which the benchmark results are logged.\\n\",\n    \"        sliding_window_size: the maximum number of benchmark results to use to calculate average throughput and standard deviation.\\n\",\n    \"        func_name: the name of the function to be benchmarked.\\n\",\n    \"        benchmark_results: a dictionary containing the benchmark results of all functions.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if isinstance(interval, int):\\n\",\n    \"        interval = timedelta(seconds=interval)\\n\",\n    \"    if func_name not in benchmark_results:\\n\",\n    \"        benchmark_results[func_name] = {\\n\",\n    \"            \\\"count\\\": 0,\\n\",\n    \"            \\\"last_count\\\": 0,\\n\",\n    \"            \\\"start\\\": None,\\n\",\n    \"            \\\"last_start\\\": None,\\n\",\n    \"            \\\"history\\\": [],\\n\",\n    \"        }\\n\",\n    \"        if sliding_window_size is not None:\\n\",\n    \"            benchmark_results[func_name][\\\"history\\\"] = deque(maxlen=sliding_window_size)\\n\",\n    \"\\n\",\n    \"    benchmark_results[func_name][\\\"count\\\"] += 1\\n\",\n    \"\\n\",\n    \"    if benchmark_results[func_name][\\\"count\\\"] == 1:\\n\",\n    \"        benchmark_results[func_name][\\\"start\\\"] = benchmark_results[func_name][\\n\",\n    \"            \\\"last_start\\\"\\n\",\n    \"        ] = datetime.utcnow()\\n\",\n    \"\\n\",\n    \"    diff = datetime.utcnow() - benchmark_results[func_name][\\\"last_start\\\"]\\n\",\n    \"    if diff >= interval:\\n\",\n    \"        throughput = (\\n\",\n    \"            benchmark_results[func_name][\\\"count\\\"]\\n\",\n    \"            - benchmark_results[func_name][\\\"last_count\\\"]\\n\",\n    \"        ) / (diff / timedelta(seconds=1))\\n\",\n    \"        log_msg = f\\\"Throughput = {throughput:5,.0f}\\\"\\n\",\n    \"\\n\",\n    \"        if sliding_window_size is not 
None:\\n\",\n    \"            benchmark_results[func_name][\\\"history\\\"].append(throughput)\\n\",\n    \"\\n\",\n    \"            log_msg += f\\\", Avg throughput = {mean(benchmark_results[func_name]['history']):5,.0f}\\\"\\n\",\n    \"        #             if len(benchmark_results[func_name][\\\"history\\\"]) > 1:\\n\",\n    \"        #                 log_msg += f\\\", Standard deviation of throughput is {stdev(benchmark_results[func_name]['history']):5,.0f}\\\"\\n\",\n    \"        log_msg = (\\n\",\n    \"            log_msg\\n\",\n    \"            + f\\\" - For {func_name}(interval={interval.seconds},{sliding_window_size=})\\\"\\n\",\n    \"        )\\n\",\n    \"        logger.info(log_msg)\\n\",\n    \"\\n\",\n    \"        benchmark_results[func_name][\\\"last_start\\\"] = datetime.utcnow()\\n\",\n    \"        benchmark_results[func_name][\\\"last_count\\\"] = benchmark_results[func_name][\\n\",\n    \"            \\\"count\\\"\\n\",\n    \"        ]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0d027a73\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka.benchmark: Throughput = 2,401,289, Avg throughput = 2,401,289 - For dummy_func(interval=1,sliding_window_size=5)\\n\",\n      \"[INFO] fastkafka.benchmark: Throughput = 2,532,538, Avg throughput = 2,466,914 - For dummy_func(interval=1,sliding_window_size=5)\\n\",\n      \"[INFO] fastkafka.benchmark: Throughput = 2,523,505, Avg throughput = 2,485,777 - For dummy_func(interval=1,sliding_window_size=5)\\n\",\n      \"[INFO] fastkafka.benchmark: Throughput = 2,466,875, Avg throughput = 2,481,052 - For dummy_func(interval=1,sliding_window_size=5)\\n\",\n      \"[INFO] fastkafka.benchmark: Throughput = 2,479,232, Avg throughput = 2,480,688 - For dummy_func(interval=1,sliding_window_size=5)\\n\",\n      \"[INFO] fastkafka.benchmark: Throughput = 2,488,960, 
Avg throughput = 2,498,222 - For dummy_func(interval=1,sliding_window_size=5)\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'dummy_func': {'count': 15000000,\\n\",\n       \"  'last_count': 14892399,\\n\",\n       \"  'start': datetime.datetime(2023, 4, 7, 10, 48, 19, 887819),\\n\",\n       \"  'last_start': datetime.datetime(2023, 4, 7, 10, 48, 25, 891228),\\n\",\n       \"  'history': deque([2532538.0, 2523505.0, 2466875.0, 2479232.0, 2488960.0],\\n\",\n       \"        maxlen=5)}}\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"interval = timedelta(seconds=1)\\n\",\n    \"sliding_window_size = 5\\n\",\n    \"func_name = \\\"dummy_func\\\"\\n\",\n    \"benchmark_results = dict()\\n\",\n    \"\\n\",\n    \"n = 15_000_000\\n\",\n    \"for i in range(n):\\n\",\n    \"    _benchmark(\\n\",\n    \"        interval=interval,\\n\",\n    \"        sliding_window_size=sliding_window_size,\\n\",\n    \"        func_name=func_name,\\n\",\n    \"        benchmark_results=benchmark_results,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"display(benchmark_results)\\n\",\n    \"\\n\",\n    \"assert benchmark_results[func_name][\\\"count\\\"] == n\\n\",\n    \"assert len(benchmark_results[func_name][\\\"history\\\"]) <= sliding_window_size, len(\\n\",\n    \"    benchmark_results[func_name][\\\"history\\\"]\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e4570e65\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/018_Avro_Encode_Decoder.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e484e6a3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.encoder.avro\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"330229f3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"/home/kumaran/.local/lib/python3.11/site-packages/pydantic/_internal/_config.py:257: UserWarning: Valid config keys have changed in V2:\\n\",\n      \"* 'json_encoders' has been removed\\n\",\n      \"  warnings.warn(message, UserWarning)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"import io\\n\",\n    \"import json\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import fastavro\\n\",\n    \"from pydantic import BaseModel, create_model\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import export\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"84ed548a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import tempfile\\n\",\n    \"\\n\",\n    \"import pytest\\n\",\n    \"from pydantic import Field\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"84c22900\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"40c6f220\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1e61e48a\",\n   
\"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d561af81\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b19543e7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.encoder\\\")\\n\",\n    \"class AvroBase(BaseModel):\\n\",\n    \"    \\\"\\\"\\\"This is base pydantic class that will add some methods\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    @classmethod\\n\",\n    \"    def avro_schema_for_pydantic_object(\\n\",\n    \"        cls,\\n\",\n    \"        pydantic_model: BaseModel,\\n\",\n    \"        by_alias: bool = True,\\n\",\n    \"        namespace: Optional[str] = None,\\n\",\n    \"    ) -> Dict[str, Any]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Returns the Avro schema for the given Pydantic object.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            pydantic_model (BaseModel): The Pydantic object.\\n\",\n    \"            by_alias (bool, optional): Generate schemas using aliases defined. 
Defaults to True.\\n\",\n    \"            namespace (Optional[str], optional): Optional namespace string for schema generation.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            Dict[str, Any]: The Avro schema for the model.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        schema = pydantic_model.__class__.model_json_schema(by_alias=by_alias)\\n\",\n    \"\\n\",\n    \"        if namespace is None:\\n\",\n    \"            # default namespace will be based on title\\n\",\n    \"            namespace = schema[\\\"title\\\"]\\n\",\n    \"\\n\",\n    \"        return cls._avro_schema(schema, namespace)\\n\",\n    \"    \\n\",\n    \"    @classmethod\\n\",\n    \"    def avro_schema_for_pydantic_class(\\n\",\n    \"        cls,\\n\",\n    \"        pydantic_model: Type[BaseModel],\\n\",\n    \"        by_alias: bool = True,\\n\",\n    \"        namespace: Optional[str] = None,\\n\",\n    \"    ) -> Dict[str, Any]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Returns the Avro schema for the given Pydantic class.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            pydantic_model (Type[BaseModel]): The Pydantic class.\\n\",\n    \"            by_alias (bool, optional): Generate schemas using aliases defined. 
Defaults to True.\\n\",\n    \"            namespace (Optional[str], optional): Optional namespace string for schema generation.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            Dict[str, Any]: The Avro schema for the model.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        schema = pydantic_model.model_json_schema(by_alias=by_alias)\\n\",\n    \"\\n\",\n    \"        if namespace is None:\\n\",\n    \"            # default namespace will be based on title\\n\",\n    \"            namespace = schema[\\\"title\\\"]\\n\",\n    \"\\n\",\n    \"        return cls._avro_schema(schema, namespace)\\n\",\n    \"\\n\",\n    \"    @classmethod\\n\",\n    \"    def avro_schema(\\n\",\n    \"        cls, by_alias: bool = True, namespace: Optional[str] = None\\n\",\n    \"    ) -> Dict[str, Any]:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Returns the Avro schema for the Pydantic class.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            by_alias (bool, optional): Generate schemas using aliases defined. 
Defaults to True.\\n\",\n    \"            namespace (Optional[str], optional): Optional namespace string for schema generation.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            Dict[str, Any]: The Avro schema for the model.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        schema = cls.schema(by_alias=by_alias)\\n\",\n    \"\\n\",\n    \"        if namespace is None:\\n\",\n    \"            # default namespace will be based on title\\n\",\n    \"            namespace = schema[\\\"title\\\"]\\n\",\n    \"\\n\",\n    \"        return cls._avro_schema(schema, namespace)\\n\",\n    \"\\n\",\n    \"    @staticmethod\\n\",\n    \"    def _avro_schema(schema: Dict[str, Any], namespace: str) -> Dict[str, Any]:\\n\",\n    \"        \\\"\\\"\\\"Return the avro schema for the given pydantic schema\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"        classes_seen = set()\\n\",\n    \"\\n\",\n    \"        def get_definition(ref: str, schema: Dict[str, Any]) -> Dict[str, Any]:\\n\",\n    \"            \\\"\\\"\\\"Reading definition of base schema for nested structs\\\"\\\"\\\"\\n\",\n    \"            id = ref.replace(\\\"#/definitions/\\\", \\\"\\\")\\n\",\n    \"            d = schema.get(\\\"definitions\\\", {}).get(id)\\n\",\n    \"            if d is None:\\n\",\n    \"                raise RuntimeError(f\\\"Definition {id} does not exist\\\")\\n\",\n    \"            return d  # type: ignore\\n\",\n    \"\\n\",\n    \"        def get_type(value: Dict[str, Any]) -> Dict[str, Any]:\\n\",\n    \"            \\\"\\\"\\\"Returns a type of a single field\\\"\\\"\\\"\\n\",\n    \"            t = value.get(\\\"type\\\")\\n\",\n    \"            f = value.get(\\\"format\\\")\\n\",\n    \"            r = value.get(\\\"$ref\\\")\\n\",\n    \"            a = value.get(\\\"additionalProperties\\\")\\n\",\n    \"            avro_type_dict: Dict[str, Any] = {}\\n\",\n    \"            if \\\"default\\\" in value:\\n\",\n    \"                
avro_type_dict[\\\"default\\\"] = value.get(\\\"default\\\")\\n\",\n    \"            if \\\"description\\\" in value:\\n\",\n    \"                avro_type_dict[\\\"doc\\\"] = value.get(\\\"description\\\")\\n\",\n    \"            if \\\"allOf\\\" in value and len(value[\\\"allOf\\\"]) == 1:\\n\",\n    \"                r = value[\\\"allOf\\\"][0][\\\"$ref\\\"]\\n\",\n    \"            if r is not None:\\n\",\n    \"                class_name = r.replace(\\\"#/definitions/\\\", \\\"\\\")\\n\",\n    \"                if class_name in classes_seen:\\n\",\n    \"                    avro_type_dict[\\\"type\\\"] = class_name\\n\",\n    \"                else:\\n\",\n    \"                    d = get_definition(r, schema)\\n\",\n    \"                    if \\\"enum\\\" in d:\\n\",\n    \"                        avro_type_dict[\\\"type\\\"] = {\\n\",\n    \"                            \\\"type\\\": \\\"enum\\\",\\n\",\n    \"                            \\\"symbols\\\": [str(v) for v in d[\\\"enum\\\"]],\\n\",\n    \"                            \\\"name\\\": d[\\\"title\\\"],\\n\",\n    \"                        }\\n\",\n    \"                    else:\\n\",\n    \"                        avro_type_dict[\\\"type\\\"] = {\\n\",\n    \"                            \\\"type\\\": \\\"record\\\",\\n\",\n    \"                            \\\"fields\\\": get_fields(d),\\n\",\n    \"                            # Name of the struct should be unique true the complete schema\\n\",\n    \"                            # Because of this the path in the schema is tracked and used as name for a nested struct/array\\n\",\n    \"                            \\\"name\\\": class_name,\\n\",\n    \"                        }\\n\",\n    \"                    classes_seen.add(class_name)\\n\",\n    \"            elif t == \\\"array\\\":\\n\",\n    \"                items = value.get(\\\"items\\\")\\n\",\n    \"                tn = get_type(items)  # type: ignore\\n\",\n    \"                # If 
items in array are a object:\\n\",\n    \"                if \\\"$ref\\\" in items:  # type: ignore\\n\",\n    \"                    tn = tn[\\\"type\\\"]\\n\",\n    \"                # If items in array are a logicalType\\n\",\n    \"                if (\\n\",\n    \"                    isinstance(tn, dict)\\n\",\n    \"                    and isinstance(tn.get(\\\"type\\\", {}), dict)\\n\",\n    \"                    and tn.get(\\\"type\\\", {}).get(\\\"logicalType\\\") is not None\\n\",\n    \"                ):\\n\",\n    \"                    tn = tn[\\\"type\\\"]\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = {\\\"type\\\": \\\"array\\\", \\\"items\\\": tn}\\n\",\n    \"            elif t == \\\"string\\\" and f == \\\"date-time\\\":\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = {\\n\",\n    \"                    \\\"type\\\": \\\"long\\\",\\n\",\n    \"                    \\\"logicalType\\\": \\\"timestamp-micros\\\",\\n\",\n    \"                }\\n\",\n    \"            elif t == \\\"string\\\" and f == \\\"date\\\":\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = {\\n\",\n    \"                    \\\"type\\\": \\\"int\\\",\\n\",\n    \"                    \\\"logicalType\\\": \\\"date\\\",\\n\",\n    \"                }\\n\",\n    \"            elif t == \\\"string\\\" and f == \\\"time\\\":\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = {\\n\",\n    \"                    \\\"type\\\": \\\"long\\\",\\n\",\n    \"                    \\\"logicalType\\\": \\\"time-micros\\\",\\n\",\n    \"                }\\n\",\n    \"            elif t == \\\"string\\\" and f == \\\"uuid\\\":\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = {\\n\",\n    \"                    \\\"type\\\": \\\"string\\\",\\n\",\n    \"                    \\\"logicalType\\\": \\\"uuid\\\",\\n\",\n    \"                }\\n\",\n    \"            elif t == \\\"string\\\":\\n\",\n    \"                avro_type_dict[\\\"type\\\"] 
= \\\"string\\\"\\n\",\n    \"            elif t == \\\"number\\\":\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = \\\"double\\\"\\n\",\n    \"            elif t == \\\"integer\\\":\\n\",\n    \"                # integer in python can be a long\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = \\\"long\\\"\\n\",\n    \"            elif t == \\\"boolean\\\":\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = \\\"boolean\\\"\\n\",\n    \"            elif t == \\\"object\\\":\\n\",\n    \"                if a is None:\\n\",\n    \"                    value_type = \\\"string\\\"\\n\",\n    \"                else:\\n\",\n    \"                    value_type = get_type(a)  # type: ignore\\n\",\n    \"                if isinstance(value_type, dict) and len(value_type) == 1:\\n\",\n    \"                    value_type = value_type.get(\\\"type\\\")  # type: ignore\\n\",\n    \"                avro_type_dict[\\\"type\\\"] = {\\\"type\\\": \\\"map\\\", \\\"values\\\": value_type}\\n\",\n    \"            else:\\n\",\n    \"                raise NotImplementedError(\\n\",\n    \"                    f\\\"Type '{t}' not support yet, \\\"\\n\",\n    \"                    f\\\"please report this at https://github.com/godatadriven/pydantic-avro/issues\\\"\\n\",\n    \"                )\\n\",\n    \"            return avro_type_dict\\n\",\n    \"\\n\",\n    \"        def get_fields(s: Dict[str, Any]) -> List[Dict[str, Any]]:\\n\",\n    \"            \\\"\\\"\\\"Return a list of fields of a struct\\\"\\\"\\\"\\n\",\n    \"            fields = []\\n\",\n    \"\\n\",\n    \"            required = s.get(\\\"required\\\", [])\\n\",\n    \"            for key, value in s.get(\\\"properties\\\", {}).items():\\n\",\n    \"                if \\\"type\\\" not in value and \\\"anyOf\\\" in value:\\n\",\n    \"                    any_of_types = value.pop(\\\"anyOf\\\")\\n\",\n    \"                    types = [x[\\\"type\\\"] for x in any_of_types if 
x[\\\"type\\\"] != \\\"null\\\"]\\n\",\n    \"                    value[\\\"type\\\"] = types[0]\\n\",\n    \"                avro_type_dict = get_type(value)\\n\",\n    \"                avro_type_dict[\\\"name\\\"] = key\\n\",\n    \"\\n\",\n    \"                if key not in required:\\n\",\n    \"                    if avro_type_dict.get(\\\"default\\\") is None:\\n\",\n    \"                        avro_type_dict[\\\"type\\\"] = [\\\"null\\\", avro_type_dict[\\\"type\\\"]]\\n\",\n    \"                        avro_type_dict[\\\"default\\\"] = None\\n\",\n    \"\\n\",\n    \"                fields.append(avro_type_dict)\\n\",\n    \"            return fields\\n\",\n    \"\\n\",\n    \"        fields = get_fields(schema)\\n\",\n    \"\\n\",\n    \"        return {\\n\",\n    \"            \\\"type\\\": \\\"record\\\",\\n\",\n    \"            \\\"namespace\\\": namespace,\\n\",\n    \"            \\\"name\\\": schema[\\\"title\\\"],\\n\",\n    \"            \\\"fields\\\": fields,\\n\",\n    \"        }\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"97c98333\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"test_user_schema = {\\n\",\n    \"    \\\"type\\\": \\\"record\\\",\\n\",\n    \"    \\\"namespace\\\": \\\"User\\\",\\n\",\n    \"    \\\"name\\\": \\\"User\\\",\\n\",\n    \"    \\\"fields\\\": [\\n\",\n    \"        {\\\"type\\\": \\\"string\\\", \\\"name\\\": \\\"name\\\"},\\n\",\n    \"        {\\\"type\\\": [\\\"null\\\", \\\"long\\\"], \\\"name\\\": \\\"favorite_number\\\", \\\"default\\\": None},\\n\",\n    \"        {\\\"type\\\": [\\\"null\\\", \\\"string\\\"], \\\"name\\\": \\\"favorite_color\\\", \\\"default\\\": None},\\n\",\n    \"    ],\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class User(BaseModel):\\n\",\n    \"    name: str\\n\",\n    \"    favorite_number: Optional[int] = None\\n\",\n    \"    favorite_color: Optional[str] = None\"\n   ]\n  },\n  {\n   
\"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"30d7c9a0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'type': 'record',\\n\",\n       \" 'namespace': 'User',\\n\",\n       \" 'name': 'User',\\n\",\n       \" 'fields': [{'type': 'string', 'name': 'name'},\\n\",\n       \"  {'default': None, 'type': ['null', 'long'], 'name': 'favorite_number'},\\n\",\n       \"  {'default': None, 'type': ['null', 'string'], 'name': 'favorite_color'}]}\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"actual = AvroBase.avro_schema_for_pydantic_class(User)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == test_user_schema\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"934835d9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# ToDo\\n\",\n    \"# 1. Rewrite with fastavro - Done\\n\",\n    \"# 2. Generate schema from pydantic itself - Done\\n\",\n    \"#        - Pydantic to avro schema conversion methods - Done\\n\",\n    \"# 3. 
Generate pydantic class from avro schema\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"659da394\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.encoder\\\")\\n\",\n    \"def avro_encoder(msg: BaseModel) -> bytes:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Encoder to encode pydantic instances to avro message\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        msg: An instance of pydantic basemodel\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A bytes message which is encoded from pydantic basemodel\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    schema = fastavro.schema.parse_schema(AvroBase.avro_schema_for_pydantic_object(msg))\\n\",\n    \"    bytes_writer = io.BytesIO()\\n\",\n    \"    \\n\",\n    \"    d = msg.model_dump()\\n\",\n    \"    for k, v in d.items():\\n\",\n    \"        if \\\"pydantic_core\\\" in str(type(v)):\\n\",\n    \"            d[k] = str(v)\\n\",\n    \"    \\n\",\n    \"    fastavro.schemaless_writer(bytes_writer, schema, d)\\n\",\n    \"    raw_bytes = bytes_writer.getvalue()\\n\",\n    \"    return raw_bytes\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a89f790\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"b'\\\\x0eKumaran\\\\x02\\\\x12\\\\x02\\\\nblack'\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"msg = User(name=\\\"Kumaran\\\", favorite_number=9, favorite_color=\\\"black\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"actual = avro_encoder(msg)\\n\",\n    \"display(actual)\\n\",\n    \"\\n\",\n    \"assert isinstance(actual, bytes)\\n\",\n    \"assert actual == b\\\"\\\\x0eKumaran\\\\x02\\\\x12\\\\x02\\\\nblack\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 
null,\n   \"id\": \"cdbdbb80\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.encoder\\\")\\n\",\n    \"def avro_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decoder to decode avro encoded messages to pydantic model instance\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        raw_msg: Avro encoded bytes message received from Kafka topic\\n\",\n    \"        cls: Pydantic class; This pydantic class will be used to construct instance of same class\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        An instance of given pydantic class\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    schema = fastavro.schema.parse_schema(AvroBase.avro_schema_for_pydantic_class(cls))\\n\",\n    \"\\n\",\n    \"    bytes_reader = io.BytesIO(raw_msg)\\n\",\n    \"    msg_dict = fastavro.schemaless_reader(bytes_reader, schema)\\n\",\n    \"\\n\",\n    \"    return cls(**msg_dict)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c98ae71a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"User(name='123', favorite_number=0, favorite_color='111')\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"raw_msg = b\\\"\\\\x06123\\\\x02\\\\x00\\\\x02\\\\x06111\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"actual = avro_decoder(raw_msg, cls=User)\\n\",\n    \"display(actual)\\n\",\n    \"\\n\",\n    \"assert isinstance(actual, User)\\n\",\n    \"assert actual.name == \\\"123\\\"\\n\",\n    \"assert actual.favorite_number == 0\\n\",\n    \"assert actual.favorite_color == \\\"111\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7c9dce67\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  },\n  {\n   \"cell_type\": 
\"code\",\n   \"execution_count\": null,\n   \"id\": \"497b7549\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.encoder\\\")\\n\",\n    \"def avsc_to_pydantic(schema: Dict[str, Any]) -> Type[BaseModel]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Generate pydantic model from given Avro Schema\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        schema: Avro schema in dictionary format\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Pydantic model class built from given avro schema\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if \\\"type\\\" not in schema or schema[\\\"type\\\"] != \\\"record\\\":\\n\",\n    \"        raise AttributeError(\\\"Type not supported\\\")\\n\",\n    \"    if \\\"name\\\" not in schema:\\n\",\n    \"        raise AttributeError(\\\"Name is required\\\")\\n\",\n    \"    if \\\"fields\\\" not in schema:\\n\",\n    \"        raise AttributeError(\\\"fields are required\\\")\\n\",\n    \"\\n\",\n    \"    classes = {}\\n\",\n    \"\\n\",\n    \"    def get_python_type(t: Union[str, Dict[str, Any]]) -> str:\\n\",\n    \"        \\\"\\\"\\\"Returns python type for given avro type\\\"\\\"\\\"\\n\",\n    \"        optional = False\\n\",\n    \"        if isinstance(t, str):\\n\",\n    \"            if t == \\\"string\\\":\\n\",\n    \"                py_type = \\\"str\\\"\\n\",\n    \"            elif t == \\\"long\\\" or t == \\\"int\\\":\\n\",\n    \"                py_type = \\\"int\\\"\\n\",\n    \"            elif t == \\\"boolean\\\":\\n\",\n    \"                py_type = \\\"bool\\\"\\n\",\n    \"            elif t == \\\"double\\\" or t == \\\"float\\\":\\n\",\n    \"                py_type = \\\"float\\\"\\n\",\n    \"            elif t in classes:\\n\",\n    \"                py_type = t\\n\",\n    \"            else:\\n\",\n    \"                raise NotImplementedError(f\\\"Type {t} not supported yet\\\")\\n\",\n   
 \"        elif isinstance(t, list):\\n\",\n    \"            if \\\"null\\\" in t:\\n\",\n    \"                optional = True\\n\",\n    \"            if len(t) > 2 or (not optional and len(t) > 1):\\n\",\n    \"                raise NotImplementedError(\\\"Only a single type ia supported yet\\\")\\n\",\n    \"            c = t.copy()\\n\",\n    \"            c.remove(\\\"null\\\")\\n\",\n    \"            py_type = get_python_type(c[0])\\n\",\n    \"        elif t.get(\\\"logicalType\\\") == \\\"uuid\\\":\\n\",\n    \"            py_type = \\\"UUID\\\"\\n\",\n    \"        elif t.get(\\\"logicalType\\\") == \\\"decimal\\\":\\n\",\n    \"            py_type = \\\"Decimal\\\"\\n\",\n    \"        elif (\\n\",\n    \"            t.get(\\\"logicalType\\\") == \\\"timestamp-millis\\\"\\n\",\n    \"            or t.get(\\\"logicalType\\\") == \\\"timestamp-micros\\\"\\n\",\n    \"        ):\\n\",\n    \"            py_type = \\\"datetime\\\"\\n\",\n    \"        elif (\\n\",\n    \"            t.get(\\\"logicalType\\\") == \\\"time-millis\\\"\\n\",\n    \"            or t.get(\\\"logicalType\\\") == \\\"time-micros\\\"\\n\",\n    \"        ):\\n\",\n    \"            py_type = \\\"time\\\"\\n\",\n    \"        elif t.get(\\\"logicalType\\\") == \\\"date\\\":\\n\",\n    \"            py_type = \\\"date\\\"\\n\",\n    \"        elif t.get(\\\"type\\\") == \\\"enum\\\":\\n\",\n    \"            enum_name = t.get(\\\"name\\\")\\n\",\n    \"            if enum_name not in classes:\\n\",\n    \"                enum_class = f\\\"class {enum_name}(str, Enum):\\\\n\\\"\\n\",\n    \"                for s in t.get(\\\"symbols\\\"):  # type: ignore\\n\",\n    \"                    enum_class += f'    {s} = \\\"{s}\\\"\\\\n'\\n\",\n    \"                classes[enum_name] = enum_class\\n\",\n    \"            py_type = enum_name  # type: ignore\\n\",\n    \"        elif t.get(\\\"type\\\") == \\\"string\\\":\\n\",\n    \"            py_type = \\\"str\\\"\\n\",\n    \"        elif 
t.get(\\\"type\\\") == \\\"array\\\":\\n\",\n    \"            sub_type = get_python_type(t.get(\\\"items\\\"))  # type: ignore\\n\",\n    \"            py_type = f\\\"List[{sub_type}]\\\"\\n\",\n    \"        elif t.get(\\\"type\\\") == \\\"record\\\":\\n\",\n    \"            record_type_to_pydantic(t)\\n\",\n    \"            py_type = t.get(\\\"name\\\")  # type: ignore\\n\",\n    \"        elif t.get(\\\"type\\\") == \\\"map\\\":\\n\",\n    \"            value_type = get_python_type(t.get(\\\"values\\\"))  # type: ignore\\n\",\n    \"            py_type = f\\\"Dict[str, {value_type}]\\\"\\n\",\n    \"        else:\\n\",\n    \"            raise NotImplementedError(\\n\",\n    \"                f\\\"Type {t} not supported yet, \\\"\\n\",\n    \"                f\\\"please report this at https://github.com/godatadriven/pydantic-avro/issues\\\"\\n\",\n    \"            )\\n\",\n    \"        if optional:\\n\",\n    \"            return f\\\"Optional[{py_type}]\\\"\\n\",\n    \"        else:\\n\",\n    \"            return py_type\\n\",\n    \"\\n\",\n    \"    def record_type_to_pydantic(schema: Dict[str, Any]) -> Type[BaseModel]:\\n\",\n    \"        \\\"\\\"\\\"Convert a single avro record type to a pydantic class\\\"\\\"\\\"\\n\",\n    \"        name = (\\n\",\n    \"            schema[\\\"name\\\"]\\n\",\n    \"            if \\\".\\\" not in schema[\\\"name\\\"]\\n\",\n    \"            else schema[\\\"name\\\"].split(\\\".\\\")[-1]\\n\",\n    \"        )\\n\",\n    \"        current = f\\\"class {schema['name']}(BaseModel):\\\\n\\\"\\n\",\n    \"\\n\",\n    \"        kwargs: Dict[str, Tuple[str, Any]] = {}\\n\",\n    \"\\n\",\n    \"        if len(schema[\\\"fields\\\"]) == 0:\\n\",\n    \"            raise ValueError(\\\"Avro schema has no fields\\\")\\n\",\n    \"\\n\",\n    \"        for field in schema[\\\"fields\\\"]:\\n\",\n    \"            n = field[\\\"name\\\"]\\n\",\n    \"            t = get_python_type(field[\\\"type\\\"])\\n\",\n    \"         
   default = field.get(\\\"default\\\")\\n\",\n    \"            if \\\"default\\\" not in field:\\n\",\n    \"                kwargs[n] = (t, ...)\\n\",\n    \"                current += f\\\"    {n}: {t}\\\\n\\\"\\n\",\n    \"            elif isinstance(default, (bool, type(None))):\\n\",\n    \"                kwargs[n] = (t, default)\\n\",\n    \"                current += f\\\"    {n}: {t} = {default}\\\\n\\\"\\n\",\n    \"            else:\\n\",\n    \"                kwargs[n] = (t, default)\\n\",\n    \"                current += f\\\"    {n}: {t} = {json.dumps(default)}\\\\n\\\"\\n\",\n    \"\\n\",\n    \"        classes[name] = current\\n\",\n    \"        pydantic_model = create_model(name, __module__=__name__, **kwargs)  # type: ignore\\n\",\n    \"        return pydantic_model  # type: ignore\\n\",\n    \"\\n\",\n    \"    return record_type_to_pydantic(schema)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8dbc3c30\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'type': 'record',\\n\",\n       \" 'namespace': 'User',\\n\",\n       \" 'name': 'User',\\n\",\n       \" 'fields': [{'type': 'string', 'name': 'name'},\\n\",\n       \"  {'default': None, 'type': ['null', 'long'], 'name': 'favorite_number'},\\n\",\n       \"  {'default': None, 'type': ['null', 'string'], 'name': 'favorite_color'}]}\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"__main__.User\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'name': FieldInfo(annotation=str, required=True),\\n\",\n       \" 'favorite_number': FieldInfo(annotation=Union[int, NoneType], required=False),\\n\",\n       \" 'favorite_color': FieldInfo(annotation=Union[str, NoneType], required=False)}\"\n      
]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"User(name='Kumaran', favorite_number=9, favorite_color='black')\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"User(name='Kumaran', favorite_number=9, favorite_color='black')\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"user_schema = AvroBase.avro_schema_for_pydantic_class(User)\\n\",\n    \"display(user_schema)\\n\",\n    \"\\n\",\n    \"A = avsc_to_pydantic(user_schema)\\n\",\n    \"display(A)\\n\",\n    \"display(A.model_fields)\\n\",\n    \"# assert isinstance(A, ModelMetaclass)\\n\",\n    \"assert list(A.model_fields.keys()) == [\\\"name\\\", \\\"favorite_number\\\", \\\"favorite_color\\\"]\\n\",\n    \"\\n\",\n    \"a = A(name=\\\"Kumaran\\\", favorite_number=\\\"9\\\", favorite_color=\\\"black\\\")\\n\",\n    \"u = User(\\n\",\n    \"    name=\\\"Kumaran\\\", favorite_number=\\\"9\\\", favorite_color=\\\"black\\\"\\n\",\n    \")\\n\",\n    \"display(a)\\n\",\n    \"display(u)\\n\",\n    \"assert a.name == u.name\\n\",\n    \"assert a.favorite_number == u.favorite_number\\n\",\n    \"assert a.favorite_color == u.favorite_color\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"68efa351\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'type': 'record', 'namespace': 'User', 'name': 'User', 'fields': []}\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<ExceptionInfo ValueError('Avro schema has no fields') tblen=3>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    
\"user_schema = AvroBase.avro_schema_for_pydantic_class(User)\\n\",\n    \"user_schema[\\\"fields\\\"] = []\\n\",\n    \"\\n\",\n    \"display(user_schema)\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    A = avsc_to_pydantic(user_schema)\\n\",\n    \"display(e)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9cf574fc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/019_Json_Encode_Decoder.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e484e6a3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.encoder.json\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"330229f3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"/home/kumaran/.local/lib/python3.11/site-packages/pydantic/_internal/_config.py:257: UserWarning: Valid config keys have changed in V2:\\n\",\n      \"* 'json_encoders' has been removed\\n\",\n      \"  warnings.warn(message, UserWarning)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"import json\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import export\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"84ed548a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import json\\n\",\n    \"import tempfile\\n\",\n    \"\\n\",\n    \"from pydantic import Field\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"84c22900\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"40c6f220\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1e61e48a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n 
    \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"44803f0e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _to_json_utf8(o: Any) -> bytes:\\n\",\n    \"    \\\"\\\"\\\"Converts to JSON and then encodes with UTF-8\\\"\\\"\\\"\\n\",\n    \"    if hasattr(o, \\\"model_dump_json\\\"):\\n\",\n    \"        return o.model_dump_json().encode(\\\"utf-8\\\")  # type: ignore\\n\",\n    \"    else:\\n\",\n    \"        return json.dumps(o).encode(\\\"utf-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6adf56fc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert _to_json_utf8({\\\"a\\\": 1, \\\"b\\\": [2, 3]}) == b'{\\\"a\\\": 1, \\\"b\\\": [2, 3]}'\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class A(BaseModel):\\n\",\n    \"    name: str = Field()\\n\",\n    \"    age: int\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert _to_json_utf8(A(name=\\\"Davor\\\", age=12)) == b'{\\\"name\\\":\\\"Davor\\\",\\\"age\\\":12}'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7fb9e74d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class User(BaseModel):\\n\",\n    \"    name: str\\n\",\n    \"    favorite_number: Optional[int] = None\\n\",\n    \"    favorite_color: Optional[str] = None\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"659da394\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.encoder\\\")\\n\",\n    \"def json_encoder(msg: BaseModel) -> 
bytes:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Encoder to encode pydantic instances to json string\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        msg: An instance of pydantic basemodel\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Json string in bytes which is encoded from pydantic basemodel\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return _to_json_utf8(msg)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a89f790\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"b'{\\\"name\\\":\\\"Kumaran\\\",\\\"favorite_number\\\":9,\\\"favorite_color\\\":\\\"black\\\"}'\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"msg = User(name=\\\"Kumaran\\\", favorite_number=9, favorite_color=\\\"black\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"actual = json_encoder(msg)\\n\",\n    \"display(actual)\\n\",\n    \"\\n\",\n    \"assert isinstance(actual, bytes)\\n\",\n    \"assert actual == b'{\\\"name\\\":\\\"Kumaran\\\",\\\"favorite_number\\\":9,\\\"favorite_color\\\":\\\"black\\\"}'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cdbdbb80\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"fastkafka.encoder\\\")\\n\",\n    \"def json_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decoder to decode json string in bytes to pydantic model instance\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        raw_msg: Bytes message received from Kafka topic\\n\",\n    \"        cls: Pydantic class; This pydantic class will be used to construct instance of same class\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        An instance of given pydantic class\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"  
  msg_dict = json.loads(raw_msg.decode(\\\"utf-8\\\"))\\n\",\n    \"\\n\",\n    \"    return cls(**msg_dict)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c98ae71a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"User(name='123', favorite_number=0, favorite_color='111')\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"raw_msg = b'{\\\"name\\\": \\\"123\\\", \\\"favorite_number\\\": 0, \\\"favorite_color\\\": \\\"111\\\"}'\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"actual = json_decoder(raw_msg, cls=User)\\n\",\n    \"display(actual)\\n\",\n    \"\\n\",\n    \"assert isinstance(actual, User)\\n\",\n    \"assert actual.name == \\\"123\\\"\\n\",\n    \"assert actual.favorite_number == 0\\n\",\n    \"assert actual.favorite_color == \\\"111\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9cf574fc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/020_Encoder_Export.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"287ff107\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp encoder\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"051196f4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"from fastkafka._components.encoder.avro import (\\n\",\n    \"    AvroBase,\\n\",\n    \"    avro_decoder,\\n\",\n    \"    avro_encoder,\\n\",\n    \"    avsc_to_pydantic,\\n\",\n    \")\\n\",\n    \"from fastkafka._components.encoder.json import json_decoder, json_encoder\\n\",\n    \"from fastkafka._components.meta import export\\n\",\n    \"\\n\",\n    \"__all__ = [\\n\",\n    \"    \\\"AvroBase\\\",\\n\",\n    \"    \\\"avro_decoder\\\",\\n\",\n    \"    \\\"avro_encoder\\\",\\n\",\n    \"    \\\"avsc_to_pydantic\\\",\\n\",\n    \"    \\\"json_decoder\\\",\\n\",\n    \"    \\\"json_encoder\\\",\\n\",\n    \"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d0b3de26\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert AvroBase.__module__ == \\\"fastkafka.encoder\\\"\\n\",\n    \"assert avro_decoder.__module__ == \\\"fastkafka.encoder\\\"\\n\",\n    \"assert avro_encoder.__module__ == \\\"fastkafka.encoder\\\"\\n\",\n    \"assert avsc_to_pydantic.__module__ == \\\"fastkafka.encoder\\\"\\n\",\n    \"assert json_decoder.__module__ == \\\"fastkafka.encoder\\\"\\n\",\n    \"assert json_encoder.__module__ == \\\"fastkafka.encoder\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"98638d2e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@export(\\\"_dummy\\\")\\n\",\n    \"def dummy() -> None:\\n\",\n    \"    pass\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"code\",\n   \"execution_count\": null,\n   \"id\": \"9139b7ec\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/021_FastKafkaServer.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"28d8fbc7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _server\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7fcc07a5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import multiprocessing\\n\",\n    \"import platform\\n\",\n    \"import signal\\n\",\n    \"import threading\\n\",\n    \"from contextlib import contextmanager\\n\",\n    \"from typing import *\\n\",\n    \"from types import FrameType\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"import typer\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import _import_from_string\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"170ddd1c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import os\\n\",\n    \"from time import sleep\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel\\n\",\n    \"from typer.testing import CliRunner\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka._components.test_dependencies import generate_app_in_tmp\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"62aaa890\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"55be7819\",\n   \"metadata\": {},\n   \"outputs\": [],\n  
 \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"de03595b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__, level=20)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b9ee8c91\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a2d2a997\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class ServerProcess:\\n\",\n    \"    def __init__(self, app: str, kafka_broker_name: str):\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Represents a server process for running the FastKafka application.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            app (str): Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\\n\",\n    \"            kafka_broker_name (str): The name of the Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self.app = app\\n\",\n    \"        self.should_exit = False\\n\",\n    \"        self.kafka_broker_name = kafka_broker_name\\n\",\n    \"\\n\",\n    \"    def run(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Runs the FastKafka application server process.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        return 
asyncio.run(self._serve())\\n\",\n    \"\\n\",\n    \"    async def _serve(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Internal method that runs the FastKafka application server.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        self._install_signal_handlers()\\n\",\n    \"\\n\",\n    \"        self.application = _import_from_string(self.app)\\n\",\n    \"        self.application.set_kafka_broker(self.kafka_broker_name)\\n\",\n    \"\\n\",\n    \"        async with self.application:\\n\",\n    \"            await self._main_loop()\\n\",\n    \"\\n\",\n    \"    def _install_signal_handlers(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Installs signal handlers for handling termination signals.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        if threading.current_thread() is not threading.main_thread():\\n\",\n    \"            raise RuntimeError()\\n\",\n    \"\\n\",\n    \"        loop = asyncio.get_event_loop()\\n\",\n    \"\\n\",\n    \"        HANDLED_SIGNALS = (\\n\",\n    \"            signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.\\n\",\n    \"            signal.SIGTERM,  # Unix signal 15. 
Sent by `kill <pid>`.\\n\",\n    \"        )\\n\",\n    \"        if platform.system() == \\\"Windows\\\":\\n\",\n    \"            HANDLED_SIGNALS = (*HANDLED_SIGNALS, signal.SIGBREAK) # type: ignore\\n\",\n    \"\\n\",\n    \"        def handle_windows_exit(signum: int, frame: Optional[FrameType]) -> None:\\n\",\n    \"            self.should_exit = True\\n\",\n    \"\\n\",\n    \"        def handle_exit(sig: int) -> None:\\n\",\n    \"            self.should_exit = True\\n\",\n    \"\\n\",\n    \"        for sig in HANDLED_SIGNALS:\\n\",\n    \"            if platform.system() == \\\"Windows\\\":\\n\",\n    \"                signal.signal(sig, handle_windows_exit)\\n\",\n    \"            else:\\n\",\n    \"                loop.add_signal_handler(sig, handle_exit, sig)\\n\",\n    \"\\n\",\n    \"    async def _main_loop(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Main loop for the FastKafka application server process.\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        while not self.should_exit:\\n\",\n    \"            await asyncio.sleep(0.1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ced70e01\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"_app = typer.Typer()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@_app.command()\\n\",\n    \"def run_fastkafka_server_process(\\n\",\n    \"    app: str = typer.Argument(\\n\",\n    \"        ...,\\n\",\n    \"        help=\\\"Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\\\",\\n\",\n    \"    ),\\n\",\n    \"    kafka_broker: str = typer.Option(\\n\",\n    \"        ...,\\n\",\n    \"        help=\\\"Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\\\",\\n\",\n    \"    ),\\n\",\n    \") -> None:\\n\",\n    \"    ServerProcess(app, 
kafka_broker).run()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e1de33fe\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"WARNING: make sure you save the notebook before running this cell\\n\",\n      \"\\n\",\n      \"Exporting and installing the new version of the CLI command...\\n\",\n      \"Defaulting to user installation because normal site-packages is not writeable\\n\",\n      \"Obtaining file:///work/fastkafka\\n\",\n      \"  Preparing metadata (setup.py): started\\n\",\n      \"  Preparing metadata (setup.py): finished with status 'done'\\n\",\n      \"Requirement already satisfied: aiokafka>=0.8.0 in /home/kumaran/.local/lib/python3.11/site-packages (from fastkafka==0.8.0) (0.8.1)\\n\",\n      \"Requirement already satisfied: anyio>=3.0 in /usr/local/lib/python3.11/dist-packages (from fastkafka==0.8.0) (3.7.0)\\n\",\n      \"Requirement already satisfied: asyncer>=0.0.2 in /home/kumaran/.local/lib/python3.11/site-packages (from fastkafka==0.8.0) (0.0.2)\\n\",\n      \"Requirement already satisfied: docstring-parser>=0.15 in /home/kumaran/.local/lib/python3.11/site-packages (from fastkafka==0.8.0) (0.15)\\n\",\n      \"Requirement already satisfied: nest-asyncio>=1.5.6 in /usr/local/lib/python3.11/dist-packages (from fastkafka==0.8.0) (1.5.6)\\n\",\n      \"Requirement already satisfied: pydantic>=2.0 in /home/kumaran/.local/lib/python3.11/site-packages (from fastkafka==0.8.0) (2.0.2)\\n\",\n      \"Requirement already satisfied: tqdm>=4.62 in /home/kumaran/.local/lib/python3.11/site-packages (from fastkafka==0.8.0) (4.65.0)\\n\",\n      \"Requirement already satisfied: typer>=0.7.0 in /home/kumaran/.local/lib/python3.11/site-packages (from fastkafka==0.8.0) (0.9.0)\\n\",\n      \"Requirement already satisfied: async-timeout in /home/kumaran/.local/lib/python3.11/site-packages (from 
aiokafka>=0.8.0->fastkafka==0.8.0) (4.0.2)\\n\",\n      \"Requirement already satisfied: kafka-python>=2.0.2 in /home/kumaran/.local/lib/python3.11/site-packages (from aiokafka>=0.8.0->fastkafka==0.8.0) (2.0.2)\\n\",\n      \"Requirement already satisfied: packaging in /usr/local/lib/python3.11/dist-packages (from aiokafka>=0.8.0->fastkafka==0.8.0) (23.1)\\n\",\n      \"Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/dist-packages (from anyio>=3.0->fastkafka==0.8.0) (3.4)\\n\",\n      \"Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.11/dist-packages (from anyio>=3.0->fastkafka==0.8.0) (1.3.0)\\n\",\n      \"Requirement already satisfied: annotated-types>=0.4.0 in /home/kumaran/.local/lib/python3.11/site-packages (from pydantic>=2.0->fastkafka==0.8.0) (0.5.0)\\n\",\n      \"Requirement already satisfied: pydantic-core==2.1.2 in /home/kumaran/.local/lib/python3.11/site-packages (from pydantic>=2.0->fastkafka==0.8.0) (2.1.2)\\n\",\n      \"Requirement already satisfied: typing-extensions>=4.6.1 in /home/kumaran/.local/lib/python3.11/site-packages (from pydantic>=2.0->fastkafka==0.8.0) (4.7.1)\\n\",\n      \"Requirement already satisfied: click<9.0.0,>=7.1.1 in /home/kumaran/.local/lib/python3.11/site-packages (from typer>=0.7.0->fastkafka==0.8.0) (8.1.4)\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"WARNING: fastkafka 0.8.0 does not provide the extra 'all'\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Installing collected packages: fastkafka\\n\",\n      \"  Attempting uninstall: fastkafka\\n\",\n      \"    Found existing installation: fastkafka 0.8.0\\n\",\n      \"    Uninstalling fastkafka-0.8.0:\\n\",\n      \"      Successfully uninstalled fastkafka-0.8.0\\n\",\n      \"  Running setup.py develop for fastkafka\\n\",\n      \"Successfully installed fastkafka-0.8.0\\n\",\n      
\"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"print(\\\"WARNING: make sure you save the notebook before running this cell\\\\n\\\")\\n\",\n    \"\\n\",\n    \"print(\\\"Exporting and installing the new version of the CLI command...\\\")\\n\",\n    \"await asyncio.create_subprocess_exec(\\\"nbdev_export\\\")\\n\",\n    \"export_process = await asyncio.create_subprocess_exec(\\\"nbdev_export\\\")\\n\",\n    \"await export_process.wait()\\n\",\n    \"assert export_process.returncode == 0\\n\",\n    \"\\n\",\n    \"install_process = await asyncio.create_subprocess_exec(\\n\",\n    \"    \\\"pip\\\", \\\"install\\\", \\\"-e\\\", \\\"..[all]\\\"\\n\",\n    \")\\n\",\n    \"await install_process.wait()\\n\",\n    \"assert install_process.returncode == 0\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a5b0fe1a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">run-fastkafka-server-process [OPTIONS] APP                                                                 </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                    
                                               \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mrun-fastkafka-server-process [OPTIONS] APP\\u001b[0m\\u001b[1m                                                                \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #800000; text-decoration-color: #800000\\\">*</span>    app      <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  Input in the form of 'path:app', where **path** is the path to a python file and **app** is <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     an object of type **FastKafka**.                                                            
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: None]                                                                            </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #bf7f7f; text-decoration-color: #bf7f7f\\\">[required]                                                                                 </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Arguments \\u001b[0m\\u001b[2m────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[31m*\\u001b[0m    app      \\u001b[1;33mTEXT\\u001b[0m  Input in the form of 'path:app', where **path** is the path to a python file and **app** is \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     an object of type **FastKafka**.                                                            
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2m[default: None]                                                                            \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2;31m[required]                                                                                 \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #800000; text-decoration-color: #800000\\\">*</span>  <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--kafka-broker</span>              <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  Kafka broker, one of the keys of the kafka_brokers dictionary passed in    <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      the constructor of FastKafka class.                                        
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: None]                                                           </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      <span style=\\\"color: #bf7f7f; text-decoration-color: #bf7f7f\\\">[required]                                                                </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>    <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--install-completion</span>        <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">    </span>  Install completion for the current shell.                                  <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>    <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--show-completion</span>           <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">    </span>  Show completion for the current shell, to copy it or customize the         <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      installation.                                                              
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>    <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>                      <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">    </span>  Show this message and exit.                                                <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[31m*\\u001b[0m  \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-kafka\\u001b[0m\\u001b[1;36m-broker\\u001b[0m              \\u001b[1;33mTEXT\\u001b[0m  Kafka broker, one of the keys of the kafka_brokers dictionary passed in    \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      the constructor of FastKafka class.                                        
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      \\u001b[2m[default: None]                                                           \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      \\u001b[2;31m[required]                                                                \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m    \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-install\\u001b[0m\\u001b[1;36m-completion\\u001b[0m        \\u001b[1;33m    \\u001b[0m  Install completion for the current shell.                                  \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m    \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-show\\u001b[0m\\u001b[1;36m-completion\\u001b[0m           \\u001b[1;33m    \\u001b[0m  Show completion for the current shell, to copy it or customize the         \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      installation.                                                              \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m    \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m                      \\u001b[1;33m    \\u001b[0m  Show this message and exit.                                                
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"runner = CliRunner()\\n\",\n    \"result = runner.invoke(_app, [\\\"run_fastkafka_server_process\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"52e1c583\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def run_fastkafka_server(num_workers: int, app: str, kafka_broker: str) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Runs the FastKafka server with multiple worker processes.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        num_workers (int): Number of FastKafka instances to run.\\n\",\n    \"        app (str): Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\\n\",\n    \"        kafka_broker (str): Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    loop = asyncio.get_event_loop()\\n\",\n    \"\\n\",\n    \"    HANDLED_SIGNALS = (\\n\",\n    \"        signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.\\n\",\n    \"        signal.SIGTERM,  # Unix signal 15. 
Sent by `kill <pid>`.\\n\",\n    \"    )\\n\",\n    \"    if platform.system() == \\\"Windows\\\":\\n\",\n    \"            HANDLED_SIGNALS = (*HANDLED_SIGNALS, signal.SIGBREAK) # type: ignore\\n\",\n    \"\\n\",\n    \"    d = {\\\"should_exit\\\": False}\\n\",\n    \"\\n\",\n    \"    def handle_windows_exit(\\n\",\n    \"        signum: int, frame: Optional[FrameType], d: Dict[str, bool] = d\\n\",\n    \"    ) -> None:\\n\",\n    \"        d[\\\"should_exit\\\"] = True\\n\",\n    \"\\n\",\n    \"    def handle_exit(sig: int, d: Dict[str, bool] = d) -> None:\\n\",\n    \"        d[\\\"should_exit\\\"] = True\\n\",\n    \"\\n\",\n    \"    for sig in HANDLED_SIGNALS:\\n\",\n    \"        if platform.system() == \\\"Windows\\\":\\n\",\n    \"            signal.signal(sig, handle_windows_exit)\\n\",\n    \"        else:\\n\",\n    \"            loop.add_signal_handler(sig, handle_exit, sig)\\n\",\n    \"\\n\",\n    \"    async with asyncer.create_task_group() as tg:\\n\",\n    \"        args = [\\n\",\n    \"            \\\"run_fastkafka_server_process\\\",\\n\",\n    \"            \\\"--kafka-broker\\\",\\n\",\n    \"            kafka_broker,\\n\",\n    \"            app,\\n\",\n    \"        ]\\n\",\n    \"        tasks = [\\n\",\n    \"            tg.soonify(asyncio.create_subprocess_exec)(\\n\",\n    \"                *args,\\n\",\n    \"                limit=1024*1024, # Set StreamReader buffer limit to 1MB\\n\",\n    \"                stdout=asyncio.subprocess.PIPE,\\n\",\n    \"                stdin=asyncio.subprocess.PIPE,\\n\",\n    \"            )\\n\",\n    \"            for i in range(num_workers)\\n\",\n    \"        ]\\n\",\n    \"\\n\",\n    \"    procs = [task.value for task in tasks]\\n\",\n    \"\\n\",\n    \"    async def log_output(\\n\",\n    \"        output: Optional[asyncio.StreamReader], pid: int, d: Dict[str, bool] = d\\n\",\n    \"    ) -> None:\\n\",\n    \"        if output is None:\\n\",\n    \"            raise 
RuntimeError(\\\"Expected StreamReader, got None. Is stdout piped?\\\")\\n\",\n    \"        while not output.at_eof():\\n\",\n    \"            try:\\n\",\n    \"                outs = await output.readline()\\n\",\n    \"            except ValueError:\\n\",\n    \"                typer.echo(f\\\"[{pid:03d}]: Failed to read log output\\\", nl=False)\\n\",\n    \"                continue\\n\",\n    \"            if outs != b\\\"\\\":\\n\",\n    \"                typer.echo(f\\\"[{pid:03d}]: \\\" + outs.decode(\\\"utf-8\\\").strip(), nl=False)\\n\",\n    \"\\n\",\n    \"    async with asyncer.create_task_group() as tg:\\n\",\n    \"        for proc in procs:\\n\",\n    \"            tg.soonify(log_output)(proc.stdout, proc.pid)\\n\",\n    \"\\n\",\n    \"        while not d[\\\"should_exit\\\"]:\\n\",\n    \"            await asyncio.sleep(0.2)\\n\",\n    \"\\n\",\n    \"        typer.echo(\\\"Starting process cleanup, this may take a few seconds...\\\")\\n\",\n    \"        for proc in procs:\\n\",\n    \"            tg.soonify(terminate_asyncio_process)(proc)\\n\",\n    \"\\n\",\n    \"    for proc in procs:\\n\",\n    \"        output, _ = await proc.communicate()\\n\",\n    \"        if output:\\n\",\n    \"            typer.echo(f\\\"[{proc.pid:03d}]: \\\" + output.decode(\\\"utf-8\\\").strip(), nl=False)\\n\",\n    \"\\n\",\n    \"    returncodes = [proc.returncode for proc in procs]\\n\",\n    \"    if not returncodes == [0] * len(procs):\\n\",\n    \"        typer.secho(\\n\",\n    \"            f\\\"Return codes are not all zero: {returncodes}\\\",\\n\",\n    \"            err=True,\\n\",\n    \"            fg=typer.colors.RED,\\n\",\n    \"        )\\n\",\n    \"        raise typer.Exit(1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"804d0df0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@contextmanager\\n\",\n    \"def 
run_in_process(\\n\",\n    \"    target: Callable[..., Any]\\n\",\n    \") -> Generator[multiprocessing.Process, None, None]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Runs the target function in a separate process.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        target (Callable[..., Any]): The function to run in a separate process.\\n\",\n    \"\\n\",\n    \"    Yields:\\n\",\n    \"        Generator[multiprocessing.Process, None, None]: A generator that yields the process object.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    p = multiprocessing.Process(target=target)\\n\",\n    \"    try:\\n\",\n    \"        p.start()\\n\",\n    \"        yield p\\n\",\n    \"    except Exception as e:\\n\",\n    \"        print(f\\\"Exception raised {e=}\\\")\\n\",\n    \"    finally:\\n\",\n    \"        p.terminate()\\n\",\n    \"        p.join()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f7c8cea8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:10000\\n\",\n      \"[88210]: 23-07-08 16:11:08.142 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88212]: 23-07-08 16:11:08.142 [INFO] 
fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88214]: 23-07-08 16:11:08.142 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88208]: 23-07-08 16:11:08.144 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88214]: 23-07-08 16:11:08.169 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88212]: 23-07-08 16:11:08.169 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88210]: 23-07-08 16:11:08.169 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88208]: 23-07-08 16:11:08.169 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88208]: 23-07-08 16:11:08.176 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88212]: 23-07-08 16:11:08.176 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88210]: 23-07-08 16:11:08.176 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88214]: 23-07-08 16:11:08.176 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88208]: 23-07-08 16:11:08.182 [INFO] 
fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88212]: 23-07-08 16:11:08.182 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88210]: 23-07-08 16:11:08.182 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88214]: 23-07-08 16:11:08.182 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:10000'}'\\n\",\n      \"[88212]: 23-07-08 16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88210]: 23-07-08 16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88214]: 23-07-08 16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88208]: 23-07-08 16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88212]: 23-07-08 16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88212]: 23-07-08 16:11:08.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88212]: 23-07-08 16:11:08.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88214]: 23-07-08 
16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88214]: 23-07-08 16:11:08.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88214]: 23-07-08 16:11:08.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88210]: 23-07-08 16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88210]: 23-07-08 16:11:08.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88210]: 23-07-08 16:11:08.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88208]: 23-07-08 16:11:08.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88208]: 23-07-08 16:11:08.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[88208]: 23-07-08 16:11:08.190 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': '127.0.0.1:10000_group', 'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:10000'}\\n\",\n      \"[88212]: 23-07-08 16:11:08.198 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88212]: 23-07-08 16:11:08.199 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'realitime_data'})\\n\",\n      \"[88212]: 23-07-08 16:11:08.199 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'realitime_data'}\\n\",\n      \"[88212]: 23-07-08 16:11:08.199 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88212]: 23-07-08 16:11:08.204 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88212]: 23-07-08 16:11:08.205 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'training_data'})\\n\",\n      \"[88212]: 23-07-08 16:11:08.205 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'training_data'}\\n\",\n      \"[88212]: 23-07-08 16:11:08.205 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88210]: 23-07-08 16:11:08.199 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88210]: 23-07-08 16:11:08.199 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'training_data'})\\n\",\n      \"[88210]: 23-07-08 16:11:08.199 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'training_data'}\\n\",\n      \"[88210]: 23-07-08 16:11:08.199 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88210]: 23-07-08 16:11:08.205 
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88210]: 23-07-08 16:11:08.205 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'realitime_data'})\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[88210]: 23-07-08 16:11:08.205 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'realitime_data'}\\n\",\n      \"[88210]: 23-07-08 16:11:08.205 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88214]: 23-07-08 16:11:08.199 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88214]: 23-07-08 16:11:08.199 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'realitime_data'})\\n\",\n      \"[88214]: 23-07-08 16:11:08.199 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'realitime_data'}\\n\",\n      \"[88214]: 23-07-08 16:11:08.199 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88214]: 23-07-08 16:11:08.204 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88214]: 23-07-08 16:11:08.205 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'training_data'})\\n\",\n      \"[88214]: 23-07-08 16:11:08.205 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'training_data'}\\n\",\n      \"[88214]: 23-07-08 16:11:08.205 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88208]: 23-07-08 16:11:08.200 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88208]: 23-07-08 16:11:08.200 [INFO] aiokafka.consumer.subscription_state: Updating 
subscribed topics to: frozenset({'realitime_data'})\\n\",\n      \"[88208]: 23-07-08 16:11:08.200 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'realitime_data'}\\n\",\n      \"[88208]: 23-07-08 16:11:08.200 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88208]: 23-07-08 16:11:08.205 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[88208]: 23-07-08 16:11:08.206 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'training_data'})\\n\",\n      \"[88208]: 23-07-08 16:11:08.207 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'training_data'}\\n\",\n      \"[88208]: 23-07-08 16:11:08.207 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[88212]: 23-07-08 16:11:08.220 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.221 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.227 [ERROR] aiokafka.cluster: Topic realitime_data not found in cluster metadata\\n\",\n      \"[88212]: 23-07-08 16:11:08.228 [ERROR] aiokafka.cluster: Topic training_data not found in cluster metadata\\n\",\n      \"[88214]: 23-07-08 16:11:08.220 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.221 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.228 [ERROR] aiokafka.cluster: Topic realitime_data not found in cluster metadata\\n\",\n      \"[88210]: 23-07-08 16:11:08.220 [ERROR] 
aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.221 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.228 [ERROR] aiokafka.cluster: Topic training_data not found in cluster metadata\\n\",\n      \"[88208]: 23-07-08 16:11:08.220 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.227 [ERROR] aiokafka.cluster: Topic realitime_data not found in cluster metadata\\n\",\n      \"[88208]: 23-07-08 16:11:08.264 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.264 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.264 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.268 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.330 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.330 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.330 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.330 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.331 
[ERROR] aiokafka.cluster: Topic realitime_data not found in cluster metadata\\n\",\n      \"[88212]: 23-07-08 16:11:08.331 [ERROR] aiokafka.cluster: Topic training_data not found in cluster metadata\\n\",\n      \"[88208]: 23-07-08 16:11:08.339 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.339 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.339 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.344 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.367 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.368 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.370 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.370 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.374 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.377 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.434 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.436 [ERROR] aiokafka.consumer.group_coordinator: Group 
Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.436 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.439 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.441 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.443 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.444 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.446 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.448 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.450 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.472 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.475 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[88210]: 23-07-08 16:11:08.476 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.479 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create 
initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.481 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.484 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.540 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.543 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.543 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.546 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.546 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.548 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.549 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.552 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.556 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.558 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.579 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request 
failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.581 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.582 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.584 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.588 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.590 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.646 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.648 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.649 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.651 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.651 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.654 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.655 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.657 [WARNING] aiokafka.cluster: Topic 
realitime_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.661 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.663 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.684 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.686 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.686 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.689 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.693 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88208]: 23-07-08 16:11:08.695 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.753 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.756 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88212]: 23-07-08 16:11:08.757 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88212]: 23-07-08 16:11:08.759 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88210]: 23-07-08 16:11:08.760 [ERROR] 
aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.763 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88210]: 23-07-08 16:11:08.764 [WARNING] aiokafka.cluster: Topic training_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.767 [ERROR] aiokafka.consumer.group_coordinator: Group Coordinator Request failed: [Error 15] CoordinatorNotAvailableError\\n\",\n      \"[88214]: 23-07-08 16:11:08.767 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88208]: 23-07-08 16:11:08.770 [WARNING] aiokafka.cluster: Topic realitime_data is not available during auto-create initialization\\n\",\n      \"[88214]: 23-07-08 16:11:08.794 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:08.794 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:08.794 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:08.795 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:08.795 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:08.795 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:08.799 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:08.799 [INFO] 
aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:08.800 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:08.825 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 1) with member_id aiokafka-0.8.1-948daba9-9ad3-425f-ba79-8eeb737b877f\\n\",\n      \"[88208]: 23-07-08 16:11:08.826 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 1) with member_id aiokafka-0.8.1-ba7c4e0b-0df8-4a5a-87a6-eec33b0ff8de\\n\",\n      \"[88210]: 23-07-08 16:11:08.827 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 1) with member_id aiokafka-0.8.1-819b4212-60d8-488f-a9f6-3280c6ab2bd2\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[88210]: 23-07-08 16:11:08.827 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\\n\",\n      \"[88210]: 23-07-08 16:11:08.829 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'training_data': 1, 'realitime_data': 1}. 
\\n\",\n      \"[88212]: 23-07-08 16:11:08.861 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88212]: 23-07-08 16:11:08.861 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88212]: 23-07-08 16:11:08.861 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88212]: 23-07-08 16:11:08.863 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88212]: 23-07-08 16:11:08.863 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88212]: 23-07-08 16:11:08.863 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:08.868 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:08.869 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:08.869 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:08.872 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:08.872 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:08.872 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:08.873 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 1\\n\",\n      \"[88214]: 23-07-08 16:11:08.873 [INFO] 
aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='training_data', partition=0)} for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:08.874 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 1\\n\",\n      \"[88208]: 23-07-08 16:11:08.874 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:08.875 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:08.875 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:08.875 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:08.875 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 1\\n\",\n      \"[88210]: 23-07-08 16:11:08.875 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='realitime_data', partition=0)} for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:11.877 [WARNING] aiokafka.consumer.group_coordinator: Heartbeat failed for group 127.0.0.1:10000_group because it is rebalancing\\n\",\n      \"[88214]: 23-07-08 16:11:11.877 [WARNING] aiokafka.consumer.group_coordinator: Heartbeat failed for group 127.0.0.1:10000_group because it is rebalancing\\n\",\n      \"[88210]: 23-07-08 16:11:11.877 [WARNING] aiokafka.consumer.group_coordinator: Heartbeat failed for group 127.0.0.1:10000_group because it is rebalancing\\n\",\n      \"[88208]: 23-07-08 16:11:11.877 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions frozenset() for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 
16:11:11.877 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:11.890 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions frozenset({TopicPartition(topic='realitime_data', partition=0)}) for group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:11.890 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions frozenset({TopicPartition(topic='training_data', partition=0)}) for group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:11.890 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:11.890 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:11.891 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-a02307ca-1976-4092-a63a-74eddd0aa625\\n\",\n      \"[88212]: 23-07-08 16:11:11.892 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-54597d06-fb19-4c8f-a916-95bd77587674\\n\",\n      \"[88212]: 23-07-08 16:11:11.892 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-9a2bca7b-0e90-403e-a418-d7df5022f1db\\n\",\n      \"[88210]: 23-07-08 16:11:11.892 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-7c7affa3-7e32-49d1-85b5-71066432532e\\n\",\n      \"[88210]: 23-07-08 16:11:11.893 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-819b4212-60d8-488f-a9f6-3280c6ab2bd2\\n\",\n      \"[88210]: 23-07-08 16:11:11.894 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition 
assignments using roundrobin\\n\",\n      \"[88208]: 23-07-08 16:11:11.893 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-ba7c4e0b-0df8-4a5a-87a6-eec33b0ff8de\\n\",\n      \"[88208]: 23-07-08 16:11:11.893 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-cf4acaab-4ebd-4010-a60c-147126dc8711\\n\",\n      \"[88214]: 23-07-08 16:11:11.894 [INFO] aiokafka.consumer.group_coordinator: Joined group '127.0.0.1:10000_group' (generation 2) with member_id aiokafka-0.8.1-948daba9-9ad3-425f-ba79-8eeb737b877f\\n\",\n      \"[88214]: 23-07-08 16:11:11.897 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\",\n      \"[88214]: 23-07-08 16:11:11.897 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88214]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\",\n      \"[88214]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88212]: 23-07-08 16:11:11.897 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\",\n      \"[88212]: 23-07-08 16:11:11.897 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='realitime_data', partition=0)} for group 127.0.0.1:10000_group\\n\",\n      \"[88212]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\",\n      \"[88212]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 
23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\",\n      \"[88210]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='training_data', partition=0)} for group 127.0.0.1:10000_group\\n\",\n      \"[88210]: 23-07-08 16:11:11.899 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\",\n      \"[88210]: 23-07-08 16:11:11.899 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[88208]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"[88208]: 23-07-08 16:11:11.898 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group 127.0.0.1:10000_group with generation 2\\n\",\n      \"[88208]: 23-07-08 16:11:11.899 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions set() for group 127.0.0.1:10000_group\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 88208...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 88210...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 88212...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 88214...\\n\",\n      \"[88214]: 23-07-08 16:11:23.704 [INFO] 
aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88214]: 23-07-08 16:11:23.704 [INFO] aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88214]: 23-07-08 16:11:23.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88214]: 23-07-08 16:11:23.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[88214]: 23-07-08 16:11:23.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88214]: 23-07-08 16:11:23.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[88208]: 23-07-08 16:11:23.706 [INFO] aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88208]: 23-07-08 16:11:23.706 [INFO] aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88208]: 23-07-08 16:11:23.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88208]: 23-07-08 16:11:23.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[88208]: 23-07-08 16:11:23.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88208]: 23-07-08 16:11:23.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[88212]: 23-07-08 16:11:23.712 [INFO] aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88212]: 23-07-08 16:11:23.712 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88212]: 23-07-08 16:11:23.712 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[88210]: 23-07-08 16:11:23.714 [INFO] 
aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88210]: 23-07-08 16:11:23.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88210]: 23-07-08 16:11:23.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[88212]: 23-07-08 16:11:23.715 [INFO] aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88212]: 23-07-08 16:11:23.716 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88212]: 23-07-08 16:11:23.716 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[88210]: 23-07-08 16:11:23.719 [INFO] aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"[88210]: 23-07-08 16:11:23.719 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[88210]: 23-07-08 16:11:23.719 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 88214 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 88208 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 88212 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 88210 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 87739...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 87739 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 87358...\\n\",\n      \"[INFO] fastkafka._components._subprocess: 
terminate_asyncio_process(): Process 87358 terminated.\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"listener_port = 10000\\n\",\n    \"async with ApacheKafkaBroker(listener_port=listener_port) as bootstrap_server:\\n\",\n    \"    os.environ[\\\"KAFKA_HOSTNAME\\\"], os.environ[\\\"KAFKA_PORT\\\"] = bootstrap_server.split(\\\":\\\")\\n\",\n    \"\\n\",\n    \"    with generate_app_in_tmp() as app:\\n\",\n    \"\\n\",\n    \"        def run_fastkafka_server_test():\\n\",\n    \"            asyncio.run(run_fastkafka_server(4, app, \\\"localhost\\\"))\\n\",\n    \"\\n\",\n    \"        with run_in_process(run_fastkafka_server_test) as p:\\n\",\n    \"            sleep(15)\\n\",\n    \"\\n\",\n    \"        assert p.exitcode == 0, p.exitcode\\n\",\n    \"        p.close()\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2d11d7eb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/022_Subprocess.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"28d8fbc7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components._subprocess\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7fcc07a5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import platform\\n\",\n    \"import signal\\n\",\n    \"from typing import *\\n\",\n    \"from types import FrameType\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"import typer\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import get_logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"170ddd1c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import sys\\n\",\n    \"import os\\n\",\n    \"import platform\\n\",\n    \"from time import sleep\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"62aaa890\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"55be7819\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"de03595b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b9ee8c91\",\n   \"metadata\": 
{},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e44f49ae\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def terminate_asyncio_process(p: asyncio.subprocess.Process) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Terminates an asyncio process.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        p: The asyncio.subprocess.Process instance.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    logger.info(f\\\"terminate_asyncio_process(): Terminating the process {p.pid}...\\\")\\n\",\n    \"    # Check if SIGINT already propagated to process\\n\",\n    \"    try:\\n\",\n    \"        await asyncio.wait_for(p.wait(), 1)\\n\",\n    \"        logger.info(\\n\",\n    \"            f\\\"terminate_asyncio_process(): Process {p.pid} was already terminated.\\\"\\n\",\n    \"        )\\n\",\n    \"        return\\n\",\n    \"    except asyncio.TimeoutError:\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    for i in range(3):\\n\",\n    \"        if platform.system() == \\\"Windows\\\":\\n\",\n    \"            import psutil\\n\",\n    \"\\n\",\n    \"            try:\\n\",\n    \"                parent = psutil.Process(p.pid)\\n\",\n    \"                children = parent.children(recursive=True)\\n\",\n    \"                for child in children:\\n\",\n    \"                    child.kill()\\n\",\n    \"                p.send_signal(signal.CTRL_BREAK_EVENT)  # type: ignore\\n\",\n    \"            except psutil.NoSuchProcess:\\n\",\n    \"                pass\\n\",\n    \" 
       else:\\n\",\n    \"            p.terminate()\\n\",\n    \"        try:\\n\",\n    \"            await asyncio.wait_for(p.wait(), 10)\\n\",\n    \"            logger.info(f\\\"terminate_asyncio_process(): Process {p.pid} terminated.\\\")\\n\",\n    \"            return\\n\",\n    \"        except asyncio.TimeoutError:\\n\",\n    \"            logger.warning(\\n\",\n    \"                f\\\"terminate_asyncio_process(): Process {p.pid} not terminated, retrying...\\\"\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"    logger.warning(f\\\"Killing the process {p.pid}...\\\")\\n\",\n    \"    p.kill()\\n\",\n    \"    await p.wait()\\n\",\n    \"    logger.warning(f\\\"terminate_asyncio_process(): Process {p.pid} killed!\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a5b0fe1a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: terminate_asyncio_process(): Terminating the process 743...\\n\",\n      \"[INFO] __main__: terminate_asyncio_process(): Process 743 terminated.\\n\",\n      \"\\u001b[?1l\\u001b>?47h\\u001b[1;24r\\u001b[m\\u001b[4l\\u001b[H\\u001b[2JEvery 0.1s: date\\u001b[1;34Hdavor-fastkafka-devel: Tue Feb  7 15:05:41 2023\\u001b[3;1HTue Feb  7 15:05:41 UTC 2023\\u001b[24;80H\\u001b[1;75H2\\u001b[3;19H2\\u001b[24;80H\\u001b[1;75H3\\u001b[3;19H3\\u001b[24;80H\\u001b[1;75H4\\u001b[3;19H4\\u001b[24;80H\\u001b[1;75H5\\u001b[3;19H5\\u001b[24;80H\\u001b[24;1H\\u001b[2J\\u001b[?47l\\u001b8\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"if platform.system() == \\\"Windows\\\":\\n\",\n    \"    code = 'import datetime; print(datetime.datetime.now())'\\n\",\n    \"    proc = await asyncio.create_subprocess_exec(\\n\",\n    \"        sys.executable, '-c', code,\\n\",\n    \"        stdout=asyncio.subprocess.PIPE)\\n\",\n    \"else:\\n\",\n    \"    proc = await 
asyncio.create_subprocess_exec(\\n\",\n    \"        \\\"watch\\\", \\\"-n\\\", \\\"0.1\\\", \\\"date\\\", stdout=asyncio.subprocess.PIPE\\n\",\n    \"    )\\n\",\n    \"sleep(3)\\n\",\n    \"await terminate_asyncio_process(proc)\\n\",\n    \"outputs, _ = await proc.communicate()\\n\",\n    \"\\n\",\n    \"print(outputs.decode(\\\"utf-8\\\"))\\n\",\n    \"\\n\",\n    \"assert proc.returncode == 0, f\\\"{command} returns {proc.returncode=}, {proc.stderr=}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"52e1c583\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def run_async_subprocesses(\\n\",\n    \"    commands: List[str], commands_args: List[List[Any]], *, sleep_between: int = 0\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Runs multiple async subprocesses.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        commands: A list of commands to execute.\\n\",\n    \"        commands_args: A list of argument lists for each command.\\n\",\n    \"        sleep_between: The sleep duration in seconds between starting each subprocess.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    loop = asyncio.get_event_loop()\\n\",\n    \"\\n\",\n    \"    HANDLED_SIGNALS = (\\n\",\n    \"        signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.\\n\",\n    \"        signal.SIGTERM,  # Unix signal 15. 
Sent by `kill <pid>`.\\n\",\n    \"    )\\n\",\n    \"    if platform.system() == \\\"Windows\\\":\\n\",\n    \"        HANDLED_SIGNALS = (*HANDLED_SIGNALS, signal.SIGBREAK)  # type: ignore\\n\",\n    \"\\n\",\n    \"    d = {\\\"should_exit\\\": False}\\n\",\n    \"\\n\",\n    \"    def handle_windows_exit(\\n\",\n    \"        signum: int, frame: Optional[FrameType], d: Dict[str, bool] = d\\n\",\n    \"    ) -> None:\\n\",\n    \"        d[\\\"should_exit\\\"] = True\\n\",\n    \"\\n\",\n    \"    def handle_exit(sig: int, d: Dict[str, bool] = d) -> None:\\n\",\n    \"        d[\\\"should_exit\\\"] = True\\n\",\n    \"\\n\",\n    \"    for sig in HANDLED_SIGNALS:\\n\",\n    \"        if platform.system() == \\\"Windows\\\":\\n\",\n    \"            signal.signal(sig, handle_windows_exit)\\n\",\n    \"        else:\\n\",\n    \"            loop.add_signal_handler(sig, handle_exit, sig)\\n\",\n    \"\\n\",\n    \"    async with asyncer.create_task_group() as tg:\\n\",\n    \"        tasks = []\\n\",\n    \"        for cmd, args in zip(commands, commands_args):\\n\",\n    \"            tasks.append(\\n\",\n    \"                tg.soonify(asyncio.create_subprocess_exec)(\\n\",\n    \"                    cmd,\\n\",\n    \"                    *args,\\n\",\n    \"                    stdout=asyncio.subprocess.PIPE,\\n\",\n    \"                    stdin=asyncio.subprocess.PIPE,\\n\",\n    \"                )\\n\",\n    \"            )\\n\",\n    \"            await asyncio.sleep(sleep_between)\\n\",\n    \"\\n\",\n    \"    procs = [task.value for task in tasks]\\n\",\n    \"\\n\",\n    \"    async def log_output(\\n\",\n    \"        output: Optional[asyncio.StreamReader], pid: int, d: Dict[str, bool] = d\\n\",\n    \"    ) -> None:\\n\",\n    \"        if output is None:\\n\",\n    \"            raise RuntimeError(\\\"Expected StreamReader, got None. 
Is stdout piped?\\\")\\n\",\n    \"        while not output.at_eof():\\n\",\n    \"            outs = await output.readline()\\n\",\n    \"            if outs != b\\\"\\\":\\n\",\n    \"                typer.echo(f\\\"[{pid:03d}]: \\\" + outs.decode(\\\"utf-8\\\"), nl=False)\\n\",\n    \"\\n\",\n    \"    async with asyncer.create_task_group() as tg:\\n\",\n    \"        for proc in procs:\\n\",\n    \"            tg.soonify(log_output)(proc.stdout, proc.pid)\\n\",\n    \"\\n\",\n    \"        while not d[\\\"should_exit\\\"]:\\n\",\n    \"            await asyncio.sleep(0.2)\\n\",\n    \"\\n\",\n    \"        typer.echo(\\\"Starting process cleanup, this may take a few seconds...\\\")\\n\",\n    \"        for proc in procs:\\n\",\n    \"            tg.soonify(terminate_asyncio_process)(proc)\\n\",\n    \"\\n\",\n    \"    for proc in procs:\\n\",\n    \"        output, _ = await proc.communicate()\\n\",\n    \"        if output:\\n\",\n    \"            typer.echo(f\\\"[{proc.pid:03d}]: \\\" + output.decode(\\\"utf-8\\\"), nl=False)\\n\",\n    \"\\n\",\n    \"    returncodes = [proc.returncode for proc in procs]\\n\",\n    \"    if not returncodes == [0] * len(procs):\\n\",\n    \"        typer.secho(\\n\",\n    \"            f\\\"Return codes are not all zero: {returncodes}\\\",\\n\",\n    \"            err=True,\\n\",\n    \"            fg=typer.colors.RED,\\n\",\n    \"        )\\n\",\n    \"        raise typer.Exit(1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c92cf406\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# async with asyncer.create_task_group() as tg:\\n\",\n    \"#     tg.soonify(run_async_subprocesses)([\\\"watch\\\"]*4, [[\\\"-n\\\", \\\"0.1\\\", \\\"date\\\"]]*4, sleep_between=1)\\n\",\n    \"#     await asyncio.sleep(3)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"804d0df0\",\n   
\"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# # | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# @contextmanager\\n\",\n    \"# def run_in_process(\\n\",\n    \"#     target: Callable[..., Any]\\n\",\n    \"# ) -> Generator[multiprocessing.Process, None, None]:\\n\",\n    \"#     p = multiprocessing.Process(target=target)\\n\",\n    \"#     try:\\n\",\n    \"#         p.start()\\n\",\n    \"#         yield p\\n\",\n    \"#     except Exception as e:\\n\",\n    \"#         print(f\\\"Exception raised {e=}\\\")\\n\",\n    \"#     finally:\\n\",\n    \"#         p.terminate()\\n\",\n    \"#         p.join()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fe7f93c3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/023_CLI.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a520a022\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _cli\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3f5a4483\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import multiprocessing\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import typer\\n\",\n    \"\\n\",\n    \"from fastkafka import _cli_docs, _cli_testing\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._server import run_fastkafka_server\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"347594e0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import os\\n\",\n    \"import platform\\n\",\n    \"import time\\n\",\n    \"\\n\",\n    \"from typer.testing import CliRunner\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka._components.test_dependencies import generate_app_in_tmp\\n\",\n    \"from fastkafka._server import terminate_asyncio_process\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6babc3b9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"44a4e2d5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ae202a18\",\n   
\"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__, level=20)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a5ec0fba\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a2bfc60c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"runner = CliRunner()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bf57b082\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"_app = typer.Typer(help=\\\"\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b4830f45\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@_app.command(\\n\",\n    \"    help=\\\"Runs Fast Kafka API application\\\",\\n\",\n    \")\\n\",\n    \"def run(\\n\",\n    \"    num_workers: int = typer.Option(\\n\",\n    \"        multiprocessing.cpu_count(),\\n\",\n    \"        help=\\\"Number of FastKafka instances to run, defaults to number of CPU cores.\\\",\\n\",\n    \"    ),\\n\",\n    \"    app: str = typer.Argument(\\n\",\n    \"        ...,\\n\",\n    \"        help=\\\"input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\\\",\\n\",\n    \"    ),\\n\",\n    \"    kafka_broker: str = typer.Option(\\n\",\n    \"        \\\"localhost\\\",\\n\",\n    \"        help=\\\"kafka_broker, one of the keys of the kafka_brokers 
dictionary passed in the constructor of FastaKafka class.\\\",\\n\",\n    \"    ),\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Runs FastKafka application.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        num_workers (int): Number of FastKafka instances to run, defaults to the number of CPU cores.\\n\",\n    \"        app (str): Input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\\n\",\n    \"        kafka_broker (str): Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        typer.Exit: If there is an unexpected internal error.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        asyncio.run(\\n\",\n    \"            run_fastkafka_server(\\n\",\n    \"                num_workers=num_workers, app=app, kafka_broker=kafka_broker\\n\",\n    \"            )\\n\",\n    \"        )\\n\",\n    \"    except Exception as e:\\n\",\n    \"        typer.secho(f\\\"Unexpected internal error: {e}\\\", err=True, fg=typer.colors.RED)\\n\",\n    \"        raise typer.Exit(1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e3328d21\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"! 
nbdev_export\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f47cd927\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">run [OPTIONS] APP                                                                                          </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mrun [OPTIONS] APP\\u001b[0m\\u001b[1m                                                                                         \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Runs Fast Kafka API application                                                                   
                \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Runs Fast Kafka API application                                                                                   \\n\",\n       \"                                                                                                                   \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #800000; text-decoration-color: #800000\\\">*</span>    app      <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  input in the form of 'path:app', where **path** is the path to a python file and **app** is <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     an object of type **FastKafka**.                                                            
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: None]                                                                            </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #bf7f7f; text-decoration-color: #bf7f7f\\\">[required]                                                                                 </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Arguments \\u001b[0m\\u001b[2m────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[31m*\\u001b[0m    app      \\u001b[1;33mTEXT\\u001b[0m  input in the form of 'path:app', where **path** is the path to a python file and **app** is \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     an object of type **FastKafka**.                                                            
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2m[default: None]                                                                            \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2;31m[required]                                                                                 \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--num-workers</span>               <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">INTEGER</span>  Number of FastKafka instances to run, defaults to number of CPU cores.     
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: 4]                                                          </span>     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--kafka-broker</span>              <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT   </span>  kafka_broker, one of the keys of the kafka_brokers dictionary passed in    <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      the constructor of FastaKafka class.                                       <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: localhost]                                                      </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--install-completion</span>        <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">       </span>  Install completion for the current shell.                                  
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--show-completion</span>           <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">       </span>  Show completion for the current shell, to copy it or customize the         <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                                      installation.                                                              <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>                      <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">       </span>  Show this message and exit.                                                
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-num\\u001b[0m\\u001b[1;36m-workers\\u001b[0m               \\u001b[1;33mINTEGER\\u001b[0m  Number of FastKafka instances to run, defaults to number of CPU cores.     \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      \\u001b[2m[default: 4]                                                          \\u001b[0m     \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-kafka\\u001b[0m\\u001b[1;36m-broker\\u001b[0m              \\u001b[1;33mTEXT   \\u001b[0m  kafka_broker, one of the keys of the kafka_brokers dictionary passed in    \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      the constructor of FastaKafka class.                                       \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      \\u001b[2m[default: localhost]                                                      \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-install\\u001b[0m\\u001b[1;36m-completion\\u001b[0m        \\u001b[1;33m       \\u001b[0m  Install completion for the current shell.                                  
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-show\\u001b[0m\\u001b[1;36m-completion\\u001b[0m           \\u001b[1;33m       \\u001b[0m  Show completion for the current shell, to copy it or customize the         \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                                      installation.                                                              \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m                      \\u001b[1;33m       \\u001b[0m  Show this message and exit.                                                \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_app, [\\\"run\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"942f780d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"_app.add_typer(_cli_docs._docs_app, name=\\\"docs\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5fc53859\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"! 
nbdev_export\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"557347c7\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">root docs install_deps [OPTIONS]                                                                           </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mroot docs install_deps [OPTIONS]\\u001b[0m\\u001b[1m                                                                          \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Installs dependencies for FastKafka documentation generation                                      
                \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Installs dependencies for FastKafka documentation generation                                                      \\n\",\n       \"                                                                                                                   \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>          Show this message and exit.                                                                     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m          Show this message and exit.                                                                     
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_app, [\\\"docs\\\", \\\"install_deps\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7ed5d781\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_app, [\\\"docs\\\", \\\"install_deps\\\"])\\n\",\n    \"assert result.exit_code == 0, f\\\"exit_code = {result.exit_code}, output = {result.stdout}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1cc8e68a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">root docs generate [OPTIONS] APP                                                                           </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       
\"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mroot docs generate [OPTIONS] APP\\u001b[0m\\u001b[1m                                                                          \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Generates documentation for a FastKafka application                                                               \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Generates documentation for a FastKafka application                                                               \\n\",\n       \"                                                                                                                   \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #800000; 
text-decoration-color: #800000\\\">*</span>    app      <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  input in the form of 'path:app', where **path** is the path to a python file and **app** is <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     an object of type **FastKafka**.                                                            <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: None]                                                                            </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #bf7f7f; text-decoration-color: #bf7f7f\\\">[required]                                                                                 </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Arguments \\u001b[0m\\u001b[2m────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[31m*\\u001b[0m    app      \\u001b[1;33mTEXT\\u001b[0m  input in the form of 'path:app', where **path** is the path to a python file and **app** is 
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     an object of type **FastKafka**.                                                            \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2m[default: None]                                                                            \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2;31m[required]                                                                                 \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--root-path</span>        <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  root path under which documentation will be created; default is current directory      <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>             <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">    </span>  Show this 
message and exit.                                                            <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-root\\u001b[0m\\u001b[1;36m-path\\u001b[0m        \\u001b[1;33mTEXT\\u001b[0m  root path under which documentation will be created; default is current directory      \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m             \\u001b[1;33m    \\u001b[0m  Show this message and exit.                                                            
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_app, [\\\"docs\\\", \\\"generate\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4a759342\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.asyncapi: Old async specifications at '/tmp/tmp7598io9j/asyncapi/spec/asyncapi.yml' does not exist.\\n\",\n      \"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/tmp/tmp7598io9j/asyncapi/spec/asyncapi.yml'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! 
✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/tmp/tmp7598io9j/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with generate_app_in_tmp() as import_str:\\n\",\n    \"    result = runner.invoke(_app, [\\\"docs\\\", \\\"generate\\\", import_str])\\n\",\n    \"    typer.echo(result.output)\\n\",\n    \"    assert result.exit_code == 0, f\\\"exit_code = {result.exit_code}, output = {result.output}\\\"\\n\",\n    \"\\n\",\n    \"    result = runner.invoke(_app, [\\\"docs\\\", \\\"generate\\\", import_str])\\n\",\n    \"    typer.echo(result.output)\\n\",\n    \"    assert result.exit_code == 0, f\\\"exit_code = {result.exit_code}, output = {result.output}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e81b95fe\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">root docs serve [OPTIONS] APP                                                                              </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                     
                              \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mroot docs serve [OPTIONS] APP\\u001b[0m\\u001b[1m                                                                             \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Generates and serves documentation for a FastKafka application                                                    \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Generates and serves documentation for a FastKafka application                                                    \\n\",\n       \"                                                                                                                   \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #800000; text-decoration-color: #800000\\\">*</span>    app      <span style=\\\"color: #808000; 
text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  input in the form of 'path:app', where **path** is the path to a python file and **app** is <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     an object of type **FastKafka**.                                                            <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: None]                                                                            </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #bf7f7f; text-decoration-color: #bf7f7f\\\">[required]                                                                                 </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Arguments \\u001b[0m\\u001b[2m────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[31m*\\u001b[0m    app      \\u001b[1;33mTEXT\\u001b[0m  input in the form of 'path:app', where **path** is the path to a python file and **app** is \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     an object of type 
**FastKafka**.                                                            \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2m[default: None]                                                                            \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2;31m[required]                                                                                 \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--root-path</span>        <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT   </span>  root path under which documentation will be created; default is current directory   <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--bind</span>             <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT   </span>  Some info <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: 127.0.0.1]</span>     
                                                 <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--port</span>             <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">INTEGER</span>  Some info <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: 8000]</span>                                                           <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>             <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">       </span>  Show this message and exit.                                                         <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-root\\u001b[0m\\u001b[1;36m-path\\u001b[0m        \\u001b[1;33mTEXT   \\u001b[0m  root path under which documentation will be created; default is current directory   \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-bind\\u001b[0m             \\u001b[1;33mTEXT   
\\u001b[0m  Some info \\u001b[2m[default: 127.0.0.1]\\u001b[0m                                                      \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-port\\u001b[0m             \\u001b[1;33mINTEGER\\u001b[0m  Some info \\u001b[2m[default: 8000]\\u001b[0m                                                           \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m             \\u001b[1;33m       \\u001b[0m  Show this message and exit.                                                         \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_app, [\\\"docs\\\", \\\"serve\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d2a20790\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with generate_app_in_tmp() as app:\\n\",\n    \"    proc = await asyncio.create_subprocess_exec(\\n\",\n    \"        \\\"fastkafka\\\",\\n\",\n    \"        \\\"docs\\\",\\n\",\n    \"        \\\"serve\\\",\\n\",\n    \"        \\\"--port=48000\\\",\\n\",\n    \"        app,\\n\",\n    \"        stdout=asyncio.subprocess.PIPE,\\n\",\n    \"    )\\n\",\n    \"    time.sleep(120)\\n\",\n    \"    await terminate_asyncio_process(proc)\\n\",\n    \"    outputs, errs = await proc.communicate()\\n\",\n    \"    expected_returncode = 15 if platform.system() == \\\"Windows\\\" else 0\\n\",\n    \"    assert proc.returncode == expected_returncode, f\\\"output = {outputs.decode('utf-8')}\\\\n exit code = {proc.returncode}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"9f1ec310\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"_app.add_typer(_cli_testing._testing_app, name=\\\"testing\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"447b94f1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">root testing install_deps [OPTIONS]                                                                        </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mroot testing install_deps [OPTIONS]\\u001b[0m\\u001b[1m                                                                       \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre 
style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Installs dependencies for FastKafka app testing                                                                   \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Installs dependencies for FastKafka app testing                                                                   \\n\",\n       \"                                                                                                                   \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>          Show this message and exit.                                                                     
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m          Show this message and exit.                                                                     \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_app, [\\\"testing\\\", \\\"install_deps\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a5a0b955\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"3f1cd496ac314d81aa61242a62545e93\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"  0%|          | 0/833975 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_app, [\\\"testing\\\", \\\"install_deps\\\"])\\n\",\n    \"assert result.exit_code == 0, f\\\"exit_code = {result.exit_code}, output = {result.output}\\\"\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   
\"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/024_CLI_Docs.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e6067a2f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _cli_docs\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ff086fc9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import platform\\n\",\n    \"import signal\\n\",\n    \"import socketserver\\n\",\n    \"from http.server import SimpleHTTPRequestHandler\\n\",\n    \"from pathlib import Path\\n\",\n    \"from types import FrameType\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import typer\\n\",\n    \"\\n\",\n    \"from fastkafka._components.docs_dependencies import (\\n\",\n    \"    _check_npm_with_local,\\n\",\n    \"    _install_docs_npm_deps,\\n\",\n    \"    _install_node,\\n\",\n    \")\\n\",\n    \"from fastkafka._components.helpers import _import_from_string, change_dir\\n\",\n    \"from fastkafka._components.logger import get_logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7308ae66\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import time\\n\",\n    \"\\n\",\n    \"from typer.testing import CliRunner\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka._components.test_dependencies import generate_app_in_tmp\\n\",\n    \"from fastkafka._server import terminate_asyncio_process\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2cf6183c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6c873369\",\n   
\"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1d072658\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7da7962a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8f1b231d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"runner = CliRunner()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9b11fc9c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"_docs_app = typer.Typer(help=\\\"Commands for managing FastKafka app documentation\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bb880142\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@_docs_app.command(\\n\",\n    \"    \\\"install_deps\\\",\\n\",\n    \"    help=\\\"Installs dependencies for FastKafka documentation generation\\\",\\n\",\n    \")\\n\",\n    \"def docs_install_deps() -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Installs dependencies for FastKafka documentation generation.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        typer.Abort: If the user chooses not to install NodeJS and npm locally.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n 
   \"    try:\\n\",\n    \"        _check_npm_with_local()\\n\",\n    \"    except Exception as e:\\n\",\n    \"        typer.secho(f\\\"Unexpected internal error: {e}\\\", err=True, fg=typer.colors.RED)\\n\",\n    \"        install_confirm = typer.confirm(\\n\",\n    \"            \\\"npm not found or version is too low, do you want us to install the NodeJS and npm locally?\\\"\\n\",\n    \"        )\\n\",\n    \"        if install_confirm is False:\\n\",\n    \"            print(\\\"Not installing NodeJS and npm locally, exiting..\\\")\\n\",\n    \"            raise typer.Abort()\\n\",\n    \"        else:\\n\",\n    \"            _install_node()\\n\",\n    \"    asyncio.run(_install_docs_npm_deps())\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@_docs_app.command(\\n\",\n    \"    \\\"generate\\\",\\n\",\n    \"    help=\\\"Generates documentation for a FastKafka application\\\",\\n\",\n    \")\\n\",\n    \"def generate_docs(\\n\",\n    \"    root_path: Optional[str] = typer.Option(\\n\",\n    \"        default=None,\\n\",\n    \"        help=\\\"root path under which documentation will be created; default is current directory\\\",\\n\",\n    \"        show_default=False,\\n\",\n    \"    ),\\n\",\n    \"    app: str = typer.Argument(\\n\",\n    \"        ...,\\n\",\n    \"        help=\\\"input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\\\",\\n\",\n    \"    ),\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Generates documentation for a FastKafka application.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        root_path: The root path under which the documentation will be created.\\n\",\n    \"            Default is the current directory.\\n\",\n    \"        app: Input in the form of 'path:app', where **path** is the path to a python\\n\",\n    \"            file and **app** is an object of type **FastKafka**.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    
\"        typer.Exit: If there is an unexpected internal error.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        application = _import_from_string(app)\\n\",\n    \"        if root_path is not None:\\n\",\n    \"            application._root_path = Path(root_path)\\n\",\n    \"            application._asyncapi_path = application._root_path / \\\"asyncapi\\\"\\n\",\n    \"\\n\",\n    \"        application.skip_docs = False\\n\",\n    \"        application.create_docs()\\n\",\n    \"    except Exception as e:\\n\",\n    \"        typer.secho(f\\\"Unexpected internal error: {e}\\\", err=True, fg=typer.colors.RED)\\n\",\n    \"        raise typer.Exit(1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@_docs_app.command(\\n\",\n    \"    \\\"serve\\\",\\n\",\n    \"    help=\\\"Generates and serves documentation for a FastKafka application\\\",\\n\",\n    \")\\n\",\n    \"def serve_docs(\\n\",\n    \"    root_path: str = typer.Option(\\n\",\n    \"        default=None,\\n\",\n    \"        help=\\\"root path under which documentation will be created; default is current directory\\\",\\n\",\n    \"        show_default=False,\\n\",\n    \"    ),\\n\",\n    \"    bind: str = typer.Option(\\\"127.0.0.1\\\", help=\\\"Some info\\\"),\\n\",\n    \"    port: int = typer.Option(8000, help=\\\"Some info\\\"),\\n\",\n    \"    app: str = typer.Argument(\\n\",\n    \"        ...,\\n\",\n    \"        help=\\\"input in the form of 'path:app', where **path** is the path to a python file and **app** is an object of type **FastKafka**.\\\",\\n\",\n    \"    ),\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Generates and serves documentation for a FastKafka application.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        root_path: The root path under which the documentation will be created.\\n\",\n    \"            Default is the current directory.\\n\",\n    \"        bind: The IP address to bind the server to. 
Default is '127.0.0.1'.\\n\",\n    \"        port: The port number to bind the server to. Default is 8000.\\n\",\n    \"        app: Input in the form of 'path:app', where **path** is the path to a python\\n\",\n    \"            file and **app** is an object of type **FastKafka**.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        typer.Exit: If there is an unexpected internal error.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        application = _import_from_string(app)\\n\",\n    \"        if root_path is not None:\\n\",\n    \"            application._root_path = Path(root_path)\\n\",\n    \"            application._asyncapi_path = application._root_path / \\\"asyncapi\\\"\\n\",\n    \"\\n\",\n    \"        application.create_docs()\\n\",\n    \"        with change_dir(str(application._asyncapi_path / \\\"docs\\\")):\\n\",\n    \"            server_address = (bind, port)\\n\",\n    \"            handler = SimpleHTTPRequestHandler\\n\",\n    \"\\n\",\n    \"            d = {\\\"should_stop\\\": False}\\n\",\n    \"\\n\",\n    \"            def sigint_handler(\\n\",\n    \"                signal: int, frame: Optional[FrameType], d: Dict[str, bool] = d\\n\",\n    \"            ) -> None:\\n\",\n    \"                d[\\\"should_stop\\\"] = True\\n\",\n    \"\\n\",\n    \"            signal.signal(signal.SIGINT, sigint_handler)\\n\",\n    \"            signal.signal(signal.SIGTERM, sigint_handler)\\n\",\n    \"            if platform.system() == \\\"Windows\\\":\\n\",\n    \"                signal.signal(signal.SIGBREAK, sigint_handler) # type: ignore\\n\",\n    \"\\n\",\n    \"            with socketserver.TCPServer(server_address, handler) as httpd:\\n\",\n    \"                httpd.timeout = 0.1\\n\",\n    \"                typer.secho(\\n\",\n    \"                    f\\\"Serving documentation on http://{server_address[0]}:{server_address[1]}\\\"\\n\",\n    \"                )\\n\",\n    \"                while not 
d[\\\"should_stop\\\"]:\\n\",\n    \"                    httpd.handle_request()\\n\",\n    \"                typer.secho(f\\\"Interupting serving of documentation and cleaning up...\\\")\\n\",\n    \"    except Exception as e:\\n\",\n    \"        typer.secho(f\\\"Unexpected internal error: {e}\\\", err=True, fg=typer.colors.RED)\\n\",\n    \"        raise typer.Exit(1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"81371542\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"! nbdev_export\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6f578155\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">root install_deps [OPTIONS]                                                                                </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mroot install_deps [OPTIONS]\\u001b[0m\\u001b[1m                                                                      
         \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Installs dependencies for FastKafka documentation generation                                                      \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Installs dependencies for FastKafka documentation generation                                                      \\n\",\n       \"                                                                                                                   \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>          Show this message and exit.                                                                     
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m          Show this message and exit.                                                                     \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_docs_app, [\\\"install_deps\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"12c1ad43\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_docs_app, [\\\"install_deps\\\"])\\n\",\n    \"assert result.exit_code in [0,1], result.exit_code\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8d436308\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">  
                                                                                                                 </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">root generate [OPTIONS] APP                                                                                </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mroot generate [OPTIONS] APP\\u001b[0m\\u001b[1m                                                                               \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Generates documentation for a FastKafka application                                                               \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Generates documentation for a FastKafka application                                                               \\n\",\n       \"                                            
                                                                       \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #800000; text-decoration-color: #800000\\\">*</span>    app      <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  input in the form of 'path:app', where **path** is the path to a python file and **app** is <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     an object of type **FastKafka**.                                                            
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: None]                                                                            </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #bf7f7f; text-decoration-color: #bf7f7f\\\">[required]                                                                                 </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Arguments \\u001b[0m\\u001b[2m────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[31m*\\u001b[0m    app      \\u001b[1;33mTEXT\\u001b[0m  input in the form of 'path:app', where **path** is the path to a python file and **app** is \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     an object of type **FastKafka**.                                                            
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2m[default: None]                                                                            \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2;31m[required]                                                                                 \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--root-path</span>        <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  root path under which documentation will be created <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: .]</span>                       <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>             <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">    </span>  Show this message and exit.                                                            
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-root\\u001b[0m\\u001b[1;36m-path\\u001b[0m        \\u001b[1;33mTEXT\\u001b[0m  root path under which documentation will be created \\u001b[2m[default: .]\\u001b[0m                       \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m             \\u001b[1;33m    \\u001b[0m  Show this message and exit.                                                            \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_docs_app, [\\\"generate\\\", \\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d2c16070\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.asyncapi: Old async specifications at '/tmp/tmpqvalk_fi/asyncapi/spec/asyncapi.yml' does not exist.\\n\",\n      \"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/tmp/tmpqvalk_fi/asyncapi/spec/asyncapi.yml'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Async docs 
generated at 'asyncapi/docs'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! ✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/tmp/tmpqvalk_fi/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with generate_app_in_tmp() as import_str:\\n\",\n    \"    result = runner.invoke(_docs_app, [\\\"generate\\\", import_str])\\n\",\n    \"    typer.echo(result.output)\\n\",\n    \"    assert result.exit_code in [0,1], result.exit_code\\n\",\n    \"\\n\",\n    \"    result = runner.invoke(_docs_app, [\\\"generate\\\", import_str])\\n\",\n    \"    typer.echo(result.output)\\n\",\n    \"    assert result.exit_code in [0,1], result.exit_code\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"27500e8b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">root serve [OPTIONS] APP                                                                                   </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       
\"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1mroot serve [OPTIONS] APP\\u001b[0m\\u001b[1m                                                                                  \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Generates and serves documentation for a FastKafka application                                                    \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Generates and serves documentation for a FastKafka application                                                    \\n\",\n       \"                                                                                                                   \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: 
#7f7f7f\\\">│</span> <span style=\\\"color: #800000; text-decoration-color: #800000\\\">*</span>    app      <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT</span>  input in the form of 'path:app', where **path** is the path to a python file and **app** is <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     an object of type **FastKafka**.                                                            <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: None]                                                                            </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>                     <span style=\\\"color: #bf7f7f; text-decoration-color: #bf7f7f\\\">[required]                                                                                 </span> <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Arguments \\u001b[0m\\u001b[2m────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[31m*\\u001b[0m    app      \\u001b[1;33mTEXT\\u001b[0m  input in the form of 'path:app', where **path** is the 
path to a python file and **app** is \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     an object of type **FastKafka**.                                                            \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2m[default: None]                                                                            \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m                     \\u001b[2;31m[required]                                                                                 \\u001b[0m \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--root-path</span>        <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT   </span>  root path under which documentation will be created <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: .]</span>                    <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--bind</span>             
<span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">TEXT   </span>  Some info <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: 127.0.0.1]</span>                                                      <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--port</span>             <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">INTEGER</span>  Some info <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">[default: 8000]</span>                                                           <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>             <span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">       </span>  Show this message and exit.                                                         
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-root\\u001b[0m\\u001b[1;36m-path\\u001b[0m        \\u001b[1;33mTEXT   \\u001b[0m  root path under which documentation will be created \\u001b[2m[default: .]\\u001b[0m                    \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-bind\\u001b[0m             \\u001b[1;33mTEXT   \\u001b[0m  Some info \\u001b[2m[default: 127.0.0.1]\\u001b[0m                                                      \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-port\\u001b[0m             \\u001b[1;33mINTEGER\\u001b[0m  Some info \\u001b[2m[default: 8000]\\u001b[0m                                                           \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m             \\u001b[1;33m       \\u001b[0m  Show this message and exit.                                                         
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_docs_app, [\\\"serve\\\", \\\"--help\\\"])\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/025_CLI_Testing.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a88cb80e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _cli_testing\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"880103ce\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import typer\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.test_dependencies import _install_testing_deps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"92d2ac1c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from typer.testing import CliRunner\\n\",\n    \"\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f88fbd7f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e0ba7950\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b5192019\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"66f02714\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n  
  }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c66c9659\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"runner = CliRunner()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"56164ae5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"_testing_app = typer.Typer(help=\\\"Commands for managing FastKafka testing\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ba6589be\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@_testing_app.command(\\n\",\n    \"    \\\"install_deps\\\",\\n\",\n    \"    help=\\\"Installs dependencies for FastKafka app testing\\\",\\n\",\n    \")\\n\",\n    \"def testing_install_deps() -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Installs dependencies for FastKafka app testing.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        typer.Exit: If there is an unexpected internal error.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        _install_testing_deps()\\n\",\n    \"    except Exception as e:\\n\",\n    \"        typer.secho(f\\\"Unexpected internal error: {e}\\\", err=True, fg=typer.colors.RED)\\n\",\n    \"        raise typer.Exit(1)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0219b3f5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"font-weight: bold\\\">                                                            
                                                       </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\"> </span><span style=\\\"color: #808000; text-decoration-color: #808000; font-weight: bold\\\">Usage: </span><span style=\\\"font-weight: bold\\\">install_deps [OPTIONS]                                                                                     </span>\\n\",\n       \"<span style=\\\"font-weight: bold\\\">                                                                                                                   </span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\",\n       \"\\u001b[1m \\u001b[0m\\u001b[1;33mUsage: \\u001b[0m\\u001b[1minstall_deps [OPTIONS]\\u001b[0m\\u001b[1m                                                                                    \\u001b[0m\\u001b[1m \\u001b[0m\\n\",\n       \"\\u001b[1m                                                                                                                   \\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"> Installs dependencies for FastKafka app testing                                                                   \\n\",\n       \"                                                                                                                   \\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \" Installs dependencies for FastKafka app testing                                                                   \\n\",\n       \"                                                                                                      
             \\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"><span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--install-completion</span>          Install completion for the current shell.                                         <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--show-completion</span>             Show completion for the current shell, to copy it or customize the installation.  <span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span> <span style=\\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\\">--help</span>                        Show this message and exit.                                                       
<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">│</span>\\n\",\n       \"<span style=\\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\\">╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</span>\\n\",\n       \"</pre>\\n\"\n      ],\n      \"text/plain\": [\n       \"\\u001b[2m╭─\\u001b[0m\\u001b[2m Options \\u001b[0m\\u001b[2m──────────────────────────────────────────────────────────────────────────────────────────────────────\\u001b[0m\\u001b[2m─╮\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-install\\u001b[0m\\u001b[1;36m-completion\\u001b[0m          Install completion for the current shell.                                         \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-show\\u001b[0m\\u001b[1;36m-completion\\u001b[0m             Show completion for the current shell, to copy it or customize the installation.  \\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m│\\u001b[0m \\u001b[1;36m-\\u001b[0m\\u001b[1;36m-help\\u001b[0m                        Show this message and exit.                                                       
\\u001b[2m│\\u001b[0m\\n\",\n       \"\\u001b[2m╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯\\u001b[0m\\n\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_testing_app, [\\\"--help\\\"])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a8c064b2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"result = runner.invoke(_testing_app)\\n\",\n    \"assert result.exit_code == 0\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/096_Docusaurus_Helper.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"id\": \"e7f22162\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _docusaurus_helper\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e51a8972\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Docusaurus Helper\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"id\": \"aca4b3f1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import itertools\\n\",\n    \"import re\\n\",\n    \"import ast\\n\",\n    \"import types\\n\",\n    \"from inspect import Signature, getmembers, isclass, isfunction, signature, ismethod, getsource, Parameter\\n\",\n    \"from pathlib import Path\\n\",\n    \"from typing import *\\n\",\n    \"from urllib.parse import urljoin\\n\",\n    \"from functools import lru_cache\\n\",\n    \"\\n\",\n    \"import typer\\n\",\n    \"from docstring_parser import parse\\n\",\n    \"from docstring_parser.common import DocstringParam, DocstringRaises, DocstringReturns, Docstring\\n\",\n    \"from nbdev.config import get_config\\n\",\n    \"from nbdev.quarto import nbdev_readme\\n\",\n    \"from nbdev.doclinks import NbdevLookup, patch_name, L, _find_mod\\n\",\n    \"from nbdev_mkdocs.mkdocs import (\\n\",\n    \"    _add_all_submodules,\\n\",\n    \"    _import_all_members,\\n\",\n    \"    _import_functions_and_classes,\\n\",\n    \"    _import_submodules\\n\",\n    \")\\n\",\n    \"from nbdev_mkdocs._helpers.doc_links_utils import fix_sym_links as update_default_symbol_links\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 7,\n   \"id\": \"4ab4b0cd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import sys\\n\",\n    \"import functools\\n\",\n    \"import random\\n\",\n    \"import shutil\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from 
contextlib import contextmanager\\n\",\n    \"from abc import abstractmethod\\n\",\n    \"from unittest.mock import patch, MagicMock\\n\",\n    \"import textwrap\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"import pytest\\n\",\n    \"from pydantic import BaseModel\\n\",\n    \"from aiokafka import ConsumerRecord\\n\",\n    \"from aiokafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor\\n\",\n    \"\\n\",\n    \"from fastkafka._components.asyncapi import ConsumeCallable\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"23776e14\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_return_annotation(s: Signature) -> str:\\n\",\n    \"    \\\"\\\"\\\"Get the return annotation from the function signature.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        s: The signature of the function from which the annotations must be extracted.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The return annotation, or an empty string if not available.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if s.return_annotation == None or \\\"inspect._empty\\\" in str(s.return_annotation):\\n\",\n    \"        return \\\"\\\"\\n\",\n    \"    if isinstance(s.return_annotation, str):\\n\",\n    \"        return s.return_annotation\\n\",\n    \"    ret_val: str = (\\n\",\n    \"        str(s.return_annotation).replace(\\\"typing.\\\", \\\"\\\").replace(\\\"NoneType\\\", \\\"None\\\")\\n\",\n    \"        if \\\"typing.\\\" in str(s.return_annotation)\\n\",\n    \"        else str(s.return_annotation.__name__)\\n\",\n    \"    )\\n\",\n    \"    return ret_val\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"727346e4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# def fixture() -> Callable[[ConsumeCallable], ConsumeCallable]:\\n\",\n    \"#     
pass\\n\",\n    \"\\n\",\n    \"# _signature = signature(fixture)\\n\",\n    \"# actual = _get_return_annotation(_signature)\\n\",\n    \"# expected = \\\"Callable[[ConsumeCallable], ConsumeCallable]\\\"\\n\",\n    \"# print(actual)\\n\",\n    \"\\n\",\n    \"# assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e18ddea5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"\\n\",\n    \"def fixture():\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0281b48b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Callable[[aiokafka.structs.ConsumerRecord], Awaitable[None]]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"\\n\",\n    \"def fixture() -> Callable[[ConsumerRecord], Awaitable[None]]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"Callable[[aiokafka.structs.ConsumerRecord], Awaitable[None]]\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a22acf60\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> 
Callable[[\\\"FastAPI\\\"], AsyncIterator[None]]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b3a0fc1c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"EventMetadata\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> \\\"EventMetadata\\\":\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"EventMetadata\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a61a1211\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Optional[str]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> Optional[str]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = (\\n\",\n    \"    \\\"Union[str, None]\\\"\\n\",\n    \"    if f\\\"{sys.version_info.major}.{sys.version_info.minor}\\\" == \\\"3.8\\\"\\n\",\n    \"    else \\\"Optional[str]\\\"\\n\",\n    \")\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected, expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4110e572\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     
\"text\": [\n      \"Iterable[str]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> Iterable[str]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"Iterable[str]\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d8667128\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"14bf2727\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"str\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"str\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f8a74b6b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"List[int]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> List[int]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = 
_get_return_annotation(_signature)\\n\",\n    \"expected = \\\"List[int]\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0d376675\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Dict[str, Any]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture() -> Dict[str, Any]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"Dict[str, Any]\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"067d1134\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Union[str, List[str]]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"\\n\",\n    \"def fixture() -> Union[str, List[str]]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"Union[str, List[str]]\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"73960f27\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"A\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"class A:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def fixture() -> A:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"A\\\"\\n\",\n    
\"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"51b09ae5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Callable[[], Any]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"\\n\",\n    \"\\n\",\n    \"def fixture() -> Callable[[], Any]:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_return_annotation(_signature)\\n\",\n    \"expected = \\\"Callable[[], Any]\\\"\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dffc79a4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_param_annotation(param: Parameter) -> str:\\n\",\n    \"    \\\"\\\"\\\"Get the annotation of a function parameter.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        param: The parameter object.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The parameter annotation, or an empty string if not available.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    if \\\"typing.\\\" in str(param.annotation):\\n\",\n    \"        return f'`{str(param.annotation).replace(\\\"typing.\\\", \\\"\\\")}`'\\n\",\n    \"    elif isinstance(param.annotation, str):\\n\",\n    \"        return param.annotation\\n\",\n    \"    else:\\n\",\n    \"        return (\\n\",\n    \"            \\\"\\\"\\n\",\n    \"            if param.annotation.__name__ == \\\"_empty\\\"\\n\",\n    \"            else f\\\"`{param.annotation.__name__}`\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"17cce765\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     
\"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"['`int`', '`Optional[str]`', '`Dict[str, int]`', 'Any']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def f(a: int, b: Optional[str], c: Dict[str, int], d: \\\"Any\\\"):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"s = signature(f)\\n\",\n    \"\\n\",\n    \"actual = []\\n\",\n    \"expected = (\\n\",\n    \"    ['`int`', '`Union[str, NoneType]`', '`Dict[str, int]`', 'Any']\\n\",\n    \"    if f\\\"{sys.version_info.major}.{sys.version_info.minor}\\\" == \\\"3.8\\\"\\n\",\n    \"    else ['`int`', '`Optional[str]`', '`Dict[str, int]`', 'Any']\\n\",\n    \")\\n\",\n    \"for param in s.parameters.values():\\n\",\n    \"    actual.append(_get_param_annotation(param))\\n\",\n    \"    \\n\",\n    \"print(actual)\\n\",\n    \"assert sorted(actual) == sorted(expected)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"47aceb4d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _get_default_value(param: Parameter) -> str:\\n\",\n    \"    \\\"\\\"\\\"Get the default value of the function parameter.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        param: The parameter object.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The default value of the function parameter.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if param.default is param.empty:\\n\",\n    \"        return \\\"*required*\\\"\\n\",\n    \"    \\n\",\n    \"    return f\\\"`'{param.default}'`\\\" if isinstance(param.default, str) else f\\\"`{param.default}`\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"53835059\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"['*required*', \\\"`'default_string'`\\\", '`{}`', \\\"`(1, 'string', 
2.0)`\\\"]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture(\\n\",\n    \"    arg_1: int,\\n\",\n    \"    *,\\n\",\n    \"    arg_2: str = \\\"default_string\\\",\\n\",\n    \"    arg_3: Dict[str, int] = {},\\n\",\n    \"    arg_4: Tuple[int, str, float] = (1, \\\"string\\\", 2.0),\\n\",\n    \") -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"s = signature(fixture)\\n\",\n    \"expected = ['*required*', \\\"`'default_string'`\\\", '`{}`', \\\"`(1, 'string', 2.0)`\\\"]\\n\",\n    \"actual = []\\n\",\n    \"for param in s.parameters.values():\\n\",\n    \"    actual.append(_get_default_value(param))\\n\",\n    \"    \\n\",\n    \"print(actual)\\n\",\n    \"assert sorted(actual) == sorted(expected)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1c7a07ca\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_params_annotation(s: Signature) -> Dict[str, Dict[str, str]]:\\n\",\n    \"    \\\"\\\"\\\"Get the annotations along with its default values for the parameters of the symbol.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        s: The signature of the function from which the annotations must be extracted.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The parameter annotations along with its default value.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return {\\n\",\n    \"        f\\\"{param.name}\\\": {\\n\",\n    \"            \\\"type\\\": _get_param_annotation(param),\\n\",\n    \"            \\\"default\\\": _get_default_value(param),\\n\",\n    \"        }\\n\",\n    \"        for param in s.parameters.values()\\n\",\n    \"    }\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0d361cd4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'arg_1': {'type': '`int`', 'default': 
'*required*'},\\n\",\n       \" 'arg_2': {'type': '`str`', 'default': \\\"`'default_string'`\\\"},\\n\",\n       \" 'arg_3': {'type': '`Dict[str, int]`', 'default': '`{}`'},\\n\",\n       \" 'arg_4': {'type': '`Optional[float]`', 'default': '`None`'},\\n\",\n       \" 'arg_5': {'type': '`Tuple[int, str, float]`',\\n\",\n       \"  'default': \\\"`(1, 'string', 2.0)`\\\"},\\n\",\n       \" 'arg_6': {'type': '`List[Union[int, str]]`', 'default': \\\"`[1, 'string']`\\\"},\\n\",\n       \" 'arg_7': {'type': '`Set[int]`', 'default': '`{1, 2, 3}`'},\\n\",\n       \" 'arg_8': {'type': '`str`', 'default': \\\"`'string'`\\\"},\\n\",\n       \" 'arg_9': {'type': '`Dict[str, str]`', 'default': '`{}`'},\\n\",\n       \" 'arg_10': {'type': '', 'default': '`None`'}}\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"def fixture(\\n\",\n    \"    arg_1: int,\\n\",\n    \"    *,\\n\",\n    \"    arg_2: str = \\\"default_string\\\",\\n\",\n    \"    arg_3: Dict[str, int] = {},\\n\",\n    \"    arg_4: Optional[float] = None,\\n\",\n    \"    arg_5: Tuple[int, str, float] = (1, \\\"string\\\", 2.0),\\n\",\n    \"    arg_6: List[Union[int, str]] = [1, \\\"string\\\"],\\n\",\n    \"    arg_7: Set[int] = {1, 2, 3},\\n\",\n    \"    arg_8: str = \\\"string\\\",\\n\",\n    \"    arg_9: Dict[str, str] = {},\\n\",\n    \"    arg_10 = None\\n\",\n    \") -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture)\\n\",\n    \"actual = _get_params_annotation(_signature)\\n\",\n    \"\\n\",\n    \"_optional_type = \\\"Union[float, NoneType]\\\" if f\\\"{sys.version_info.major}.{sys.version_info.minor}\\\" == \\\"3.8\\\" else \\\"Optional[float]\\\"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"arg_1\\\": {\\\"type\\\": \\\"`int`\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"    \\\"arg_2\\\": {\\\"type\\\": \\\"`str`\\\", \\\"default\\\": 
\\\"`'default_string'`\\\"},\\n\",\n    \"    \\\"arg_3\\\": {\\\"type\\\": \\\"`Dict[str, int]`\\\", \\\"default\\\": \\\"`{}`\\\"},\\n\",\n    \"    \\\"arg_4\\\": {\\\"type\\\": f\\\"`{_optional_type}`\\\", \\\"default\\\": \\\"`None`\\\"},\\n\",\n    \"    \\\"arg_5\\\": {\\\"type\\\": \\\"`Tuple[int, str, float]`\\\", \\\"default\\\": \\\"`(1, 'string', 2.0)`\\\"},\\n\",\n    \"    \\\"arg_6\\\": {\\\"type\\\": \\\"`List[Union[int, str]]`\\\", \\\"default\\\": \\\"`[1, 'string']`\\\"},\\n\",\n    \"    \\\"arg_7\\\": {\\\"type\\\": \\\"`Set[int]`\\\", \\\"default\\\": \\\"`{1, 2, 3}`\\\"},\\n\",\n    \"    \\\"arg_8\\\": {\\\"type\\\": \\\"`str`\\\", \\\"default\\\": \\\"`'string'`\\\"},\\n\",\n    \"    \\\"arg_9\\\": {\\\"type\\\": \\\"`Dict[str, str]`\\\", \\\"default\\\": \\\"`{}`\\\"},\\n\",\n    \"    \\\"arg_10\\\": {\\\"type\\\": \\\"\\\", \\\"default\\\": \\\"`None`\\\"},\\n\",\n    \"}\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f470e0a0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _generate_parameters_table(\\n\",\n    \"    symbol_annotations: Dict[str, Union[Dict[str, str], str]],\\n\",\n    \"    section_items: Union[List[DocstringParam]],\\n\",\n    \"    section_name: str,\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Generate parameter table in markdown format\\n\",\n    \"    \\n\",\n    \"    Args:\\n\",\n    \"        symbol_annotations: Symbol annotations along with its default value\\n\",\n    \"        section_items: The parameter section of a parsed docstring\\n\",\n    \"        section_name: The name of the section\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The parameters of a symbol in markdown-formatted string\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    nl = \\\"\\\\n\\\"\\n\",\n    \"    _section_template = 
(\\n\",\n    \"        \\\"|  Name | Type | Description | Default |\\\\n|---|---|---|---|\\\\n{section_body}\\\\n\\\"\\n\",\n    \"    )\\n\",\n    \"    section_body = \\\"\\\".join(\\n\",\n    \"        [\\n\",\n    \"            f'| `{section.arg_name}` | {symbol_annotations[\\\"parameters\\\"][section.arg_name][\\\"type\\\"]} | {section.description.replace(nl, \\\"\\\")} | {symbol_annotations[\\\"parameters\\\"][section.arg_name][\\\"default\\\"]} |\\\\n' # type: ignore\\n\",\n    \"            if section.arg_name in symbol_annotations[\\\"parameters\\\"]\\n\",\n    \"            else \\\"\\\"\\n\",\n    \"            for section in section_items\\n\",\n    \"        ]\\n\",\n    \"    )\\n\",\n    \"    return f\\\"**{section_name}**:\\\\n\\\\n\\\" + _section_template.format(\\n\",\n    \"        section_body=section_body,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"70f444db\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"**Parameters**:\\n\",\n      \"\\n\",\n      \"|  Name | Type | Description | Default |\\n\",\n      \"|---|---|---|---|\\n\",\n      \"| `name` | str | name of the person | *required* |\\n\",\n      \"| `age` | int | age of the person | *required* |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"params_dict = {\\n\",\n    \"    \\\"name\\\": {\\\"type\\\": \\\"str\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"    \\\"age\\\": {\\\"type\\\": \\\"int\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"}\\n\",\n    \"return_dict = \\\"str\\\"\\n\",\n    \"signature_dict = {\\\"parameters\\\": params_dict, \\\"return\\\": return_dict}\\n\",\n    \"\\n\",\n    \"fixture_docstring = parse(\\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"   
     age: age of the person\\n\",\n    \"    \\\"\\\"\\\")\\n\",\n    \"\\n\",\n    \"actual = _generate_parameters_table(\\n\",\n    \"    signature_dict, fixture_docstring.params, \\\"Parameters\\\"\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"**Parameters**:\\n\",\n    \"\\n\",\n    \"|  Name | Type | Description | Default |\\n\",\n    \"|---|---|---|---|\\n\",\n    \"| `name` | str | name of the person | *required* |\\n\",\n    \"| `age` | int | age of the person | *required* |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cf5f49d4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _generate_return_and_raises_table(\\n\",\n    \"    symbol_annotations: Dict[str, Union[Dict[str, str], str]],\\n\",\n    \"    section_items: Union[List[DocstringReturns], List[DocstringRaises]],\\n\",\n    \"    section_name: str,\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Generate return and raises table in markdown format\\n\",\n    \"    \\n\",\n    \"    Args:\\n\",\n    \"        symbol_annotations: Symbol annotations along with its default value\\n\",\n    \"        section_items: The parameter section of a parsed docstring\\n\",\n    \"        section_name: The name of the section\\n\",\n    \"        \\n\",\n    \"    Returns:\\n\",\n    \"        The return and raises section of a symbol in markdown-formatted string\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    nl = \\\"\\\\n\\\"\\n\",\n    \"    _section_template = \\\"|  Type | Description |\\\\n|---|---|\\\\n{section_body}\\\\n\\\"\\n\",\n    \"    section_body = \\\"\\\".join(\\n\",\n    \"        [\\n\",\n    \"            f'| `{symbol_annotations[\\\"return\\\"] if section_name == \\\"Returns\\\" else section.type_name}` | {section.description.replace(nl, 
\\\"\\\")} |\\\\n' # type: ignore\\n\",\n    \"            for section in section_items\\n\",\n    \"        ]\\n\",\n    \"    )\\n\",\n    \"    return f\\\"**{section_name}**:\\\\n\\\\n\\\" + _section_template.format(\\n\",\n    \"        section_body=section_body,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f14971a8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"**Returns**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `str` | A formatted string |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"params_dict = {\\n\",\n    \"    \\\"name\\\": {\\\"type\\\": \\\"str\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"    \\\"age\\\": {\\\"type\\\": \\\"int\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"}\\n\",\n    \"return_dict = \\\"str\\\"\\n\",\n    \"signature_dict = {\\\"parameters\\\": params_dict, \\\"return\\\": return_dict}\\n\",\n    \"\\n\",\n    \"fixture_docstring = parse(\\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A formatted string\\n\",\n    \"    \\\"\\\"\\\")\\n\",\n    \"\\n\",\n    \"actual = _generate_return_and_raises_table(\\n\",\n    \"    signature_dict, fixture_docstring.many_returns, \\\"Returns\\\"\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"**Returns**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `str` | A formatted string |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1ce3f080\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": 
\"stream\",\n     \"text\": [\n      \"**Exceptions**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `ValueError` | If name is not a string |\\n\",\n      \"| `TypeError` | If type is invalid |\\n\",\n      \"| `KeyError` | If key name is invalid |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"params_dict = {\\n\",\n    \"    \\\"name\\\": {\\\"type\\\": \\\"str\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"    \\\"age\\\": {\\\"type\\\": \\\"int\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"}\\n\",\n    \"return_dict = \\\"str\\\"\\n\",\n    \"signature_dict = {\\\"parameters\\\": params_dict, \\\"return\\\": return_dict}\\n\",\n    \"\\n\",\n    \"fixture_docstring = parse(\\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If name is not a string\\n\",\n    \"        TypeError: If type is invalid\\n\",\n    \"        KeyError: If key name is invalid\\n\",\n    \"    \\\"\\\"\\\")\\n\",\n    \"\\n\",\n    \"actual = _generate_return_and_raises_table(\\n\",\n    \"    signature_dict, fixture_docstring.raises, \\\"Exceptions\\\"\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"**Exceptions**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `ValueError` | If name is not a string |\\n\",\n    \"| `TypeError` | If type is invalid |\\n\",\n    \"| `KeyError` | If key name is invalid |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"696bcb7f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _format_docstring_section_items(\\n\",\n    \"    symbol_annotations: Dict[str, Union[Dict[str, str], 
str]],\\n\",\n    \"    section_items: Union[\\n\",\n    \"        List[DocstringParam], List[DocstringReturns], List[DocstringRaises]\\n\",\n    \"    ],\\n\",\n    \"    section_name: str,\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Format the docstring sections in a table format\\n\",\n    \"    \\n\",\n    \"    Args:\\n\",\n    \"        symbol_annotations: Symbol annotations along with its default value\\n\",\n    \"        section_items: The parameter section of a parsed docstring\\n\",\n    \"        section_name: The name of the section\\n\",\n    \"        \\n\",\n    \"    Returns:\\n\",\n    \"        The docstring sections of the symbol in markdown-formatted string\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if section_name == \\\"Parameters\\\":\\n\",\n    \"        return _generate_parameters_table(symbol_annotations, section_items, section_name) # type: ignore\\n\",\n    \"    else:\\n\",\n    \"        return _generate_return_and_raises_table(symbol_annotations, section_items, section_name) # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dbdaccee\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"**Parameters**:\\n\",\n      \"\\n\",\n      \"|  Name | Type | Description | Default |\\n\",\n      \"|---|---|---|---|\\n\",\n      \"| `name` | str | name of the person | *required* |\\n\",\n      \"| `age` | int | age of the person | *required* |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"params_dict = {\\n\",\n    \"    \\\"name\\\": {\\\"type\\\": \\\"str\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"    \\\"age\\\": {\\\"type\\\": \\\"int\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"}\\n\",\n    \"return_dict = \\\"str\\\"\\n\",\n    \"signature_dict = {\\\"parameters\\\": params_dict, \\\"return\\\": return_dict}\\n\",\n    
\"\\n\",\n    \"fixture_docstring = parse(\\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    It can contain multiple lines and can include *markdown* syntax.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"        age: age of the person\\n\",\n    \"        some_param: some_param of the person\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A formatted string\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If name is not a string\\n\",\n    \"    \\\"\\\"\\\")\\n\",\n    \"\\n\",\n    \"actual = _format_docstring_section_items(\\n\",\n    \"    signature_dict, fixture_docstring.params, \\\"Parameters\\\"\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"**Parameters**:\\n\",\n    \"\\n\",\n    \"|  Name | Type | Description | Default |\\n\",\n    \"|---|---|---|---|\\n\",\n    \"| `name` | str | name of the person | *required* |\\n\",\n    \"| `age` | int | age of the person | *required* |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3385419e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"**Parameters**:\\n\",\n      \"\\n\",\n      \"|  Name | Type | Description | Default |\\n\",\n      \"|---|---|---|---|\\n\",\n      \"| `name` | str | name of the person | *required* |\\n\",\n      \"| `age` | int | age of the person | *required* |\\n\",\n      \"| `some_param` | Dict[str, int] | some_param of the person | `{}` |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"params_dict = {\\n\",\n    \"    \\\"name\\\": {\\\"type\\\": \\\"str\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"    \\\"age\\\": {\\\"type\\\": \\\"int\\\", \\\"default\\\": 
\\\"*required*\\\"},\\n\",\n    \"    \\\"some_param\\\": {\\\"type\\\": \\\"Dict[str, int]\\\", \\\"default\\\": \\\"`{}`\\\"},\\n\",\n    \"}\\n\",\n    \"return_dict = \\\"str\\\"\\n\",\n    \"signature_dict = {\\\"parameters\\\": params_dict, \\\"return\\\": return_dict}\\n\",\n    \"\\n\",\n    \"fixture_docstring = parse(\\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    It can contain multiple lines and can include *markdown* syntax.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"        age: age of the person\\n\",\n    \"        some_param: some_param of the person\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A formatted string\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If name is not a string\\n\",\n    \"    \\\"\\\"\\\")\\n\",\n    \"\\n\",\n    \"actual = _format_docstring_section_items(\\n\",\n    \"    signature_dict, fixture_docstring.params, \\\"Parameters\\\"\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"**Parameters**:\\n\",\n    \"\\n\",\n    \"|  Name | Type | Description | Default |\\n\",\n    \"|---|---|---|---|\\n\",\n    \"| `name` | str | name of the person | *required* |\\n\",\n    \"| `age` | int | age of the person | *required* |\\n\",\n    \"| `some_param` | Dict[str, int] | some_param of the person | `{}` |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"859594bc\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"**Returns**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `str` | A formatted string |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"actual = 
_format_docstring_section_items(\\n\",\n    \"    signature_dict, fixture_docstring.many_returns, \\\"Returns\\\"\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"**Returns**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `str` | A formatted string |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f386d0a5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"**Exceptions**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `ValueError` | If name is not a string |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"actual = _format_docstring_section_items(\\n\",\n    \"    signature_dict, fixture_docstring.raises, \\\"Exceptions\\\"\\n\",\n    \")\\n\",\n    \"expected = \\\"\\\"\\\"**Exceptions**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `ValueError` | If name is not a string |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0293aabf\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_annotation(symbol: Type) -> Dict[str, Union[Dict[str, Dict[str, str]], str]]:\\n\",\n    \"    \\\"\\\"\\\"Get annotations along with its default value for a symbol\\n\",\n    \"    \\n\",\n    \"    Args:\\n\",\n    \"        symbol: The symbol for which the annotations needs to be extracted\\n\",\n    \"        \\n\",\n    \"    Returns:\\n\",\n    \"        The annotations dict along with its default value\\n\",\n    \"    
\\\"\\\"\\\"\\n\",\n    \"    symbol = symbol.fget if isinstance(symbol, property) else symbol\\n\",\n    \"    symbol_signature = signature(symbol)\\n\",\n    \"    params_dict = _get_params_annotation(symbol_signature)\\n\",\n    \"    return_annotation = _get_return_annotation(symbol_signature)\\n\",\n    \"    return {\\n\",\n    \"        \\\"parameters\\\": params_dict,\\n\",\n    \"        \\\"return\\\": return_annotation\\n\",\n    \"    }\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ac8d74a6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'parameters': {'name': {'type': '`str`', 'default': '*required*'},\\n\",\n       \"  'info': {'type': '`Tuple[int, str, float]`', 'default': '`80`'},\\n\",\n       \"  'contact': {'type': '`Optional[str]`', 'default': '`None`'}},\\n\",\n       \" 'return': 'str'}\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"@property\\n\",\n    \"def fixture(name: str, info: Tuple[int, str, float] = 80, contact: Optional[str] = None) -> str:\\n\",\n    \"    \\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"        info: info of the person\\n\",\n    \"        contact: optional contact for the documentation. 
If None, the\\n\",\n    \"            contact will be set to placeholder values:\\n\",\n    \"            name='Author' url=HttpUrl('https://www.google.com', ) email='noreply@gmail.com'\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_optional_type = \\\"Union[str, NoneType]\\\" if f\\\"{sys.version_info.major}.{sys.version_info.minor}\\\" == \\\"3.8\\\" else \\\"Optional[str]\\\"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"parameters\\\": {\\n\",\n    \"        \\\"name\\\": {\\\"type\\\": \\\"`str`\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"        \\\"info\\\": {\\\"type\\\": \\\"`Tuple[int, str, float]`\\\", \\\"default\\\": \\\"`80`\\\"},\\n\",\n    \"        \\\"contact\\\": {\\\"type\\\": f\\\"`{_optional_type}`\\\", \\\"default\\\": \\\"`None`\\\"},\\n\",\n    \"    },\\n\",\n    \"    \\\"return\\\": \\\"str\\\"\\n\",\n    \"}\\n\",\n    \"actual = _get_annotation(fixture)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cef46c80\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'parameters': {'name': {'type': '`str`', 'default': '*required*'},\\n\",\n       \"  'info': {'type': '`Union[str, List[str]]`', 'default': '*required*'},\\n\",\n       \"  'contact': {'type': '`Optional[str]`', 'default': \\\"`'default_email.com'`\\\"}},\\n\",\n       \" 'return': ''}\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"def fixture(name: str, info: Union[str, List[str]], contact: Optional[str] = \\\"default_email.com\\\"):\\n\",\n    \"    \\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"        info: info of the person\\n\",\n    \"        contact: optional contact for 
the documentation. If None, the\\n\",\n    \"            contact will be set to placeholder values:\\n\",\n    \"            name='Author' url=HttpUrl('https://www.google.com', ) email='noreply@gmail.com'\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_optional_type = \\\"Union[str, NoneType]\\\" if f\\\"{sys.version_info.major}.{sys.version_info.minor}\\\" == \\\"3.8\\\" else \\\"Optional[str]\\\"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"parameters\\\": {\\n\",\n    \"        \\\"name\\\": {\\\"type\\\": \\\"`str`\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"        \\\"info\\\": {\\\"type\\\": \\\"`Union[str, List[str]]`\\\", \\\"default\\\": \\\"*required*\\\"},\\n\",\n    \"        \\\"contact\\\": {\\\"type\\\": f\\\"`{_optional_type}`\\\", \\\"default\\\": \\\"`'default_email.com'`\\\"}\\n\",\n    \"    },\\n\",\n    \"    \\\"return\\\": \\\"\\\"\\n\",\n    \"}\\n\",\n    \"actual = _get_annotation(fixture)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1f1a4f22\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _format_docstring_sections(\\n\",\n    \"    symbol: Type, parsed_docstring: Docstring\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Format the parsed docstring sections into markdown-formatted table\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        symbol: The symbol for which to parse the docstring.\\n\",\n    \"        parsed_docstring: A Docstring object\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The markdown-formatted docstring.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    symbol_annotations = _get_annotation(symbol)\\n\",\n    \"    formatted_docstring = \\\"\\\"\\n\",\n    \"    sections = [\\n\",\n    \"        (\\\"Parameters\\\", 
parsed_docstring.params),\\n\",\n    \"        (\\\"Returns\\\", parsed_docstring.many_returns),\\n\",\n    \"        (\\\"Exceptions\\\", parsed_docstring.raises),\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    for section_name, section_items in sections:\\n\",\n    \"        if len(section_items) > 0:  # type: ignore\\n\",\n    \"            formatted_docstring += _format_docstring_section_items(\\n\",\n    \"                symbol_annotations, section_items, section_name  # type: ignore\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"    return formatted_docstring\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d585ca4d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"**Parameters**:\\n\",\n      \"\\n\",\n      \"|  Name | Type | Description | Default |\\n\",\n      \"|---|---|---|---|\\n\",\n      \"| `name` | `str` | name of the person | *required* |\\n\",\n      \"| `age` | `int` | age of the person | *required* |\\n\",\n      \"\\n\",\n      \"**Exceptions**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `ValueError` | If name is not a string |\\n\",\n      \"| `TypeError` | If name is not a string |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture(name: str, age: int):\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    It can contain multiple lines and can include *markdown* syntax.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"        age: age of the person\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If name is not a string\\n\",\n    \"        TypeError: If name is not a string\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    
\"parsed_docstring = parse(fixture.__doc__)\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"**Parameters**:\\n\",\n    \"\\n\",\n    \"|  Name | Type | Description | Default |\\n\",\n    \"|---|---|---|---|\\n\",\n    \"| `name` | `str` | name of the person | *required* |\\n\",\n    \"| `age` | `int` | age of the person | *required* |\\n\",\n    \"\\n\",\n    \"**Exceptions**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `ValueError` | If name is not a string |\\n\",\n    \"| `TypeError` | If name is not a string |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"actual = _format_docstring_sections(fixture, parsed_docstring)\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e06e2b05\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _format_free_links(s: str) -> str:\\n\",\n    \"    \\\"\\\"\\\"Format free links in a given string by adding proper spacing around them.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        s: The input string containing free links.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The modified string with properly formatted free links.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pattern = r\\\"([\\\\\\\"'])(https?:\\\\/\\\\/[^\\\\s]+)([\\\\\\\"'])\\\"\\n\",\n    \"    ret_val = re.sub(\\n\",\n    \"        pattern, lambda match: f\\\"{match.group(1)} {match.group(2)} {match.group(3)}\\\", s\\n\",\n    \"    )\\n\",\n    \"    return ret_val\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"83f6543a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\n\",\n      \"Click [here](https://www.example.com) to explore Example\\n\",\n 
     \"Learn more at [Example](http://www.example.co.in)\\n\",\n      \"Discover at [https://www.example.edu](https://www.example.edu)\\n\",\n      \"[Example](https://www.example.co.uk) is worth exploring\\n\",\n      \"\\n\",\n      \"url=HttpUrl(' https://www.google.com ', )\\n\",\n      \"url=HttpUrl(' http://www.example.net ', )\\n\",\n      \"\\\" https://www.google.edu \\\"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"\\n\",\n    \"Click [here](https://www.example.com) to explore Example\\n\",\n    \"Learn more at [Example](http://www.example.co.in)\\n\",\n    \"Discover at [https://www.example.edu](https://www.example.edu)\\n\",\n    \"[Example](https://www.example.co.uk) is worth exploring\\n\",\n    \"\\n\",\n    \"url=HttpUrl('https://www.google.com', )\\n\",\n    \"url=HttpUrl('http://www.example.net', )\\n\",\n    \"\\\"https://www.google.edu\\\"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"expected = \\\"\\\"\\\"\\n\",\n    \"Click [here](https://www.example.com) to explore Example\\n\",\n    \"Learn more at [Example](http://www.example.co.in)\\n\",\n    \"Discover at [https://www.example.edu](https://www.example.edu)\\n\",\n    \"[Example](https://www.example.co.uk) is worth exploring\\n\",\n    \"\\n\",\n    \"url=HttpUrl(' https://www.google.com ', )\\n\",\n    \"url=HttpUrl(' http://www.example.net ', )\\n\",\n    \"\\\" https://www.google.edu \\\"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"actual = _format_free_links(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"078ccbed\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _docstring_to_markdown(symbol: Type) -> str:\\n\",\n    \"    \\\"\\\"\\\"Converts a docstring to a markdown-formatted string.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        symbol: The 
symbol for which the documentation needs to be generated in markdown format.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The markdown-formatted docstring.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if symbol.__doc__ is None:\\n\",\n    \"        return \\\"\\\"\\n\",\n    \"\\n\",\n    \"    parsed_docstring = parse(symbol.__doc__)\\n\",\n    \"    formatted_docstring = f\\\"{parsed_docstring.short_description}\\\\n\\\\n\\\"\\n\",\n    \"    formatted_docstring += (\\n\",\n    \"        f\\\"{parsed_docstring.long_description}\\\\n\\\\n\\\"\\n\",\n    \"        if parsed_docstring.long_description\\n\",\n    \"        else \\\"\\\"\\n\",\n    \"    )\\n\",\n    \"    formatted_docstring += _format_docstring_sections(symbol, parsed_docstring)\\n\",\n    \"    ret_val = _format_free_links(formatted_docstring)\\n\",\n    \"\\n\",\n    \"    return ret_val\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d304a619\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture(name: str, age: int, contact: str):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\n\",\n    \"actual = _docstring_to_markdown(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dd702f11\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"This is a docstring for a sample function.\\n\",\n      \"\\n\",\n      \"**Parameters**:\\n\",\n      \"\\n\",\n      \"|  Name | Type | Description | Default |\\n\",\n      \"|---|---|---|---|\\n\",\n      \"| `name` | `str` | name of the person | *required* |\\n\",\n      \"| `age` | `int` | age of the person | 
*required* |\\n\",\n      \"| `contact` | `str` | optional contact for the documentation. If None, thecontact will be set to placeholder values:name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com' | *required* |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture(name: str, age: int, contact: str):\\n\",\n    \"    \\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"        age: age of the person\\n\",\n    \"        contact: optional contact for the documentation. If None, the\\n\",\n    \"            contact will be set to placeholder values:\\n\",\n    \"            name='Author' url=HttpUrl('https://www.google.com', ) email='noreply@gmail.com'\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"**Parameters**:\\n\",\n    \"\\n\",\n    \"|  Name | Type | Description | Default |\\n\",\n    \"|---|---|---|---|\\n\",\n    \"| `name` | `str` | name of the person | *required* |\\n\",\n    \"| `age` | `int` | age of the person | *required* |\\n\",\n    \"| `contact` | `str` | optional contact for the documentation. 
If None, thecontact will be set to placeholder values:name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com' | *required* |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"actual = _docstring_to_markdown(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9017c683\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"This is a docstring for a sample function.\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture():\\n\",\n    \"    \\\"\\\"\\\"This is a docstring for a sample function.\\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"actual = _docstring_to_markdown(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3ff970bc\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"This is a docstring for a sample function.\\n\",\n      \"\\n\",\n      \"It can contain multiple lines and can include *markdown* syntax.\\n\",\n      \"\\n\",\n      \"**Parameters**:\\n\",\n      \"\\n\",\n      \"|  Name | Type | Description | Default |\\n\",\n      \"|---|---|---|---|\\n\",\n      \"| `name` | `str` | name of the person | *required* |\\n\",\n      \"| `age` | `int` | age of the person | *required* |\\n\",\n      \"\\n\",\n      \"**Returns**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `str` | A formatted string |\\n\",\n      \"\\n\",\n      
\"**Exceptions**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `ValueError` | If name is not a string |\\n\",\n      \"| `TypeError` | If name is not a string |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture(name: str, age: int) -> str:\\n\",\n    \"    \\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"    It can contain multiple lines and can include *markdown* syntax.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: name of the person\\n\",\n    \"        age: age of the person\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A formatted string\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If name is not a string\\n\",\n    \"        TypeError: If name is not a string\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"This is a docstring for a sample function.\\n\",\n    \"\\n\",\n    \"It can contain multiple lines and can include *markdown* syntax.\\n\",\n    \"\\n\",\n    \"**Parameters**:\\n\",\n    \"\\n\",\n    \"|  Name | Type | Description | Default |\\n\",\n    \"|---|---|---|---|\\n\",\n    \"| `name` | `str` | name of the person | *required* |\\n\",\n    \"| `age` | `int` | age of the person | *required* |\\n\",\n    \"\\n\",\n    \"**Returns**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `str` | A formatted string |\\n\",\n    \"\\n\",\n    \"**Exceptions**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `ValueError` | If name is not a string |\\n\",\n    \"| `TypeError` | If name is not a string |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"actual = _docstring_to_markdown(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"82fbe401\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_submodules(module_name: str) -> List[str]:\\n\",\n    \"    \\\"\\\"\\\"Get a list of all submodules contained within the module.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        module_name: The name of the module to retrieve submodules from\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of submodule names within the module\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    members = _import_all_members(module_name)\\n\",\n    \"    members_with_submodules = _add_all_submodules(members)\\n\",\n    \"    members_with_submodules_str: List[str] = [\\n\",\n    \"        x[:-1] if x.endswith(\\\".\\\") else x for x in members_with_submodules\\n\",\n    \"    ]\\n\",\n    \"    return members_with_submodules_str\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"75b8c561\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"['fastkafka',\\n\",\n       \" 'fastkafka.EventMetadata',\\n\",\n       \" 'fastkafka.FastKafka',\\n\",\n       \" 'fastkafka.KafkaEvent',\\n\",\n       \" 'fastkafka.encoder',\\n\",\n       \" 'fastkafka.encoder.AvroBase',\\n\",\n       \" 'fastkafka.encoder.avro_decoder',\\n\",\n       \" 'fastkafka.encoder.avro_encoder',\\n\",\n       \" 'fastkafka.encoder.avsc_to_pydantic',\\n\",\n       \" 'fastkafka.encoder.json_decoder',\\n\",\n       \" 'fastkafka.encoder.json_encoder',\\n\",\n       \" 'fastkafka.executors',\\n\",\n       \" 'fastkafka.executors.DynamicTaskExecutor',\\n\",\n       \" 'fastkafka.executors.SequentialExecutor',\\n\",\n       \" 'fastkafka.testing',\\n\",\n       \" 'fastkafka.testing.ApacheKafkaBroker',\\n\",\n       \" 'fastkafka.testing.LocalRedpandaBroker',\\n\",\n       \" 'fastkafka.testing.Tester']\"\n      ]\n     },\n     
\"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"module_name = \\\"fastkafka\\\"\\n\",\n    \"members_with_submodules = _get_submodules(module_name)\\n\",\n    \"members_with_submodules\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dec44e68\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _load_submodules(\\n\",\n    \"    module_name: str, members_with_submodules: List[str]\\n\",\n    \") -> List[Type]:\\n\",\n    \"    \\\"\\\"\\\"Load the given submodules from the module.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        module_name: The name of the module whose submodules to load\\n\",\n    \"        members_with_submodules: A list of submodule names to load\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of imported submodule objects.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    submodules = _import_submodules(module_name)\\n\",\n    \"    members: List[Tuple[str, Type]] = list(\\n\",\n    \"        itertools.chain(*[_import_functions_and_classes(m) for m in submodules])\\n\",\n    \"    )\\n\",\n    \"    names = [\\n\",\n    \"        y\\n\",\n    \"        for x, y in members\\n\",\n    \"        if f\\\"{y.__module__}.{y.__name__}\\\" in members_with_submodules\\n\",\n    \"    ]\\n\",\n    \"    return names\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a7d3f5b9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"[fastkafka.EventMetadata,\\n\",\n       \" fastkafka.FastKafka,\\n\",\n       \" fastkafka.KafkaEvent,\\n\",\n       \" fastkafka.encoder.AvroBase,\\n\",\n       \" <function fastkafka.encoder.avro_decoder(raw_msg: bytes, cls: Type[pydantic.main.BaseModel]) -> Any>,\\n\",\n       \" <function 
fastkafka.encoder.avro_encoder(msg: pydantic.main.BaseModel) -> bytes>,\\n\",\n       \" <function fastkafka.encoder.avsc_to_pydantic(schema: Dict[str, Any]) -> Type[pydantic.main.BaseModel]>,\\n\",\n       \" <function fastkafka.encoder.json_decoder(raw_msg: bytes, cls: Type[pydantic.main.BaseModel]) -> Any>,\\n\",\n       \" <function fastkafka.encoder.json_encoder(msg: pydantic.main.BaseModel) -> bytes>,\\n\",\n       \" fastkafka.executors.DynamicTaskExecutor,\\n\",\n       \" fastkafka.executors.SequentialExecutor,\\n\",\n       \" fastkafka.testing.ApacheKafkaBroker,\\n\",\n       \" fastkafka.testing.LocalRedpandaBroker,\\n\",\n       \" fastkafka.testing.Tester]\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"module_name = \\\"fastkafka\\\"\\n\",\n    \"members_with_submodules = _get_submodules(module_name)\\n\",\n    \"symbols = _load_submodules(module_name, members_with_submodules)\\n\",\n    \"symbols\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a307153c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_parameters(_signature: Signature) -> List[str]:\\n\",\n    \"    \\\"\\\"\\\"Convert a function's signature into a string representation of its parameter list.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        _signature: The signature object representing the function's signature.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of strings representing the function's parameters, including their default values if applicable.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    params = [param for param in _signature.parameters.values()]\\n\",\n    \"    ret_val = [\\n\",\n    \"            f\\\"{param.name}\\\"\\n\",\n    \"            if (param.default is param.empty)\\n\",\n    \"            else 
f\\\"{param.name}='{param.default}'\\\"\\n\",\n    \"            if isinstance(param.default, str)\\n\",\n    \"            else f\\\"{param.name}={param.default}\\\"\\n\",\n    \"            for param in params\\n\",\n    \"        ]\\n\",\n    \"    return ret_val\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"37de04bd\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"['arg_1', 'arg_2', 'arg_3', 'arg_4=80']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(\\n\",\n    \"    arg_1: str, arg_2, arg_3: Union[Dict[str, str], str], arg_4: Optional[int] = 80\\n\",\n    \") -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"\\n\",\n    \"expected = ['arg_1', 'arg_2', 'arg_3', 'arg_4=80']\\n\",\n    \"actual = _get_parameters(_signature)\\n\",\n    \"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"101243e8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"['arg_1', 'arg_2']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(arg_1: str, arg_2) -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"\\n\",\n    \"expected = [\\\"arg_1\\\", \\\"arg_2\\\"]\\n\",\n    \"actual = _get_parameters(_signature)\\n\",\n    \"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ca2578a3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"['arg_1', 
\\\"arg_2='default_string'\\\", 'arg_3={}', 'arg_4=None', \\\"arg_5=(1, 'string', 2.0)\\\", \\\"arg_6=[1, 'string']\\\", 'arg_7={1, 2, 3}', \\\"arg_8='string'\\\"]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(\\n\",\n    \"    arg_1: int,\\n\",\n    \"    *,\\n\",\n    \"    arg_2: str = \\\"default_string\\\",\\n\",\n    \"    arg_3: Dict[str, int] = {},\\n\",\n    \"    arg_4: Optional[float] = None,\\n\",\n    \"    arg_5: Tuple[int, str, float] = (1, \\\"string\\\", 2.0),\\n\",\n    \"    arg_6: List[Union[int, str]] = [1, \\\"string\\\"],\\n\",\n    \"    arg_7: Set[int] = {1,2,3},\\n\",\n    \"    arg_8: str = \\\"string\\\"\\n\",\n    \") -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"params = [param for param in _signature.parameters.values()]\\n\",\n    \"expected = ['arg_1', \\\"arg_2='default_string'\\\", 'arg_3={}', 'arg_4=None', \\\"arg_5=(1, 'string', 2.0)\\\", \\\"arg_6=[1, 'string']\\\", 'arg_7={1, 2, 3}', \\\"arg_8='string'\\\"]\\n\",\n    \"actual = _get_parameters(_signature)\\n\",\n    \"\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"995e4b77\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _format_symbol_definition(\\n\",\n    \"    symbol: Type, params_list: List[str]\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Format the given symbol parameters by adding a new line and indentation.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        symbol: The symbol for which the symbol definition needs to be formatted.\\n\",\n    \"        params_list: A string representation of the parameter list.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A formatted string representation of the parameters with new lines and indentation.\\n\",\n    \"    
\\\"\\\"\\\"\\n\",\n    \"    parameters = \\\", \\\".join(params_list)\\n\",\n    \"    if parameters == \\\"\\\":\\n\",\n    \"        return f\\\"{symbol.__name__}()\\\\n\\\"\\n\",\n    \"    elif len(f\\\"{symbol.__name__}({parameters})\\\") <= 79:\\n\",\n    \"        return f\\\"{symbol.__name__}(\\\\n    {parameters}\\\\n)\\\\n\\\"\\n\",\n    \"    else:\\n\",\n    \"        formatted_parameters = \\\"\\\".join([f\\\"\\\\n    {param},\\\" for param in params_list])\\n\",\n    \"        return f\\\"{symbol.__name__}({formatted_parameters}\\\\n)\\\\n\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c27fb036\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"fixture_function()\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function():\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"fixture_function()\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"parameters = _get_parameters(_signature)\\n\",\n    \"parameters\\n\",\n    \"\\n\",\n    \"actual = _format_symbol_definition(fixture_function, parameters)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5730ad25\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"fixture_function(\\n\",\n      \"    arg_1\\n\",\n      \")\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(arg_1: str) -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"fixture_function(\\n\",\n    \"    arg_1\\n\",\n    \")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"parameters 
= _get_parameters(_signature)\\n\",\n    \"actual = _format_symbol_definition(fixture_function, parameters)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5a820ac9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"fixture_function(\\n\",\n      \"    arg_1, arg_2, arg_3=80\\n\",\n      \")\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(\\n\",\n    \"    arg_1: str, *, arg_2, arg_3: Optional[int] = 80\\n\",\n    \") -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"fixture_function(\\n\",\n    \"    arg_1, arg_2, arg_3=80\\n\",\n    \")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"parameters = _get_parameters(_signature)\\n\",\n    \"actual = _format_symbol_definition(fixture_function, parameters)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"eb70e2e9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"fixture_function(\\n\",\n      \"    arg_1, arg_2, arg_3=None, arg_4={}, arg_5=(1, 'string', 2.0)\\n\",\n      \")\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(arg_1: str, arg_2, arg_3: Optional[str] = None, arg_4:  Dict[str, int] = {}, arg_5: Tuple[int, str, float] = (1, \\\"string\\\", 2.0)) -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"fixture_function(\\n\",\n    \"    arg_1, arg_2, arg_3=None, arg_4={}, arg_5=(1, 'string', 2.0)\\n\",\n    \")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"parameters = 
_get_parameters(_signature)\\n\",\n    \"actual = _format_symbol_definition(fixture_function, parameters)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"34d63768\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"fixture_function(\\n\",\n      \"    arg_1,\\n\",\n      \"    arg_2='default_string',\\n\",\n      \"    arg_3={},\\n\",\n      \"    arg_4=None,\\n\",\n      \"    arg_5=(1, 'string', 2.0),\\n\",\n      \"    arg_6=[1, 'string'],\\n\",\n      \"    arg_7={1, 2, 3},\\n\",\n      \"    arg_8='string',\\n\",\n      \")\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(\\n\",\n    \"    arg_1: int,\\n\",\n    \"    arg_2: str = \\\"default_string\\\",\\n\",\n    \"    arg_3: Dict[str, int] = {},\\n\",\n    \"    arg_4: Optional[float] = None,\\n\",\n    \"    arg_5: Tuple[int, str, float] = (1, \\\"string\\\", 2.0),\\n\",\n    \"    arg_6: List[Union[int, str]] = [1, \\\"string\\\"],\\n\",\n    \"    arg_7: Set[int] = {1,2,3},\\n\",\n    \"    arg_8: str = \\\"string\\\"\\n\",\n    \") -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"fixture_function(\\n\",\n    \"    arg_1,\\n\",\n    \"    arg_2='default_string',\\n\",\n    \"    arg_3={},\\n\",\n    \"    arg_4=None,\\n\",\n    \"    arg_5=(1, 'string', 2.0),\\n\",\n    \"    arg_6=[1, 'string'],\\n\",\n    \"    arg_7={1, 2, 3},\\n\",\n    \"    arg_8='string',\\n\",\n    \")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"_signature = signature(fixture_function)\\n\",\n    \"parameters = _get_parameters(_signature)\\n\",\n    \"\\n\",\n    \"actual = _format_symbol_definition(fixture_function, parameters)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"0f960bf1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"['/tmp/tmporcyfl70', '/work/fastkafka/nbs', '/usr/lib/python311.zip', '/usr/lib/python3.11', '/usr/lib/python3.11/lib-dynload', '', '/home/harish/.local/lib/python3.11/site-packages', '/work/fastkafka', '/usr/local/lib/python3.11/dist-packages', '/usr/lib/python3/dist-packages']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"@contextmanager\\n\",\n    \"def add_tmp_path_to_sys_path(dir_):\\n\",\n    \"    dir_ = Path(dir_).absolute().resolve(strict=True)\\n\",\n    \"    original_path = sys.path[:]\\n\",\n    \"    sys.path.insert(0, str(dir_))\\n\",\n    \"    try:\\n\",\n    \"        yield\\n\",\n    \"    finally:\\n\",\n    \"        sys.path = original_path\\n\",\n    \"        \\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    with add_tmp_path_to_sys_path(d):\\n\",\n    \"        actual = sys.path[:]\\n\",\n    \"        print(actual)\\n\",\n    \"        assert str(Path(d).resolve()) in actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ef379f1f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _get_exps(mod: str) -> Dict[str, str]:\\n\",\n    \"    mf = _find_mod(mod)\\n\",\n    \"    if not mf: return {}\\n\",\n    \"    txt = mf.read_text()\\n\",\n    \"    _def_types = ast.FunctionDef,ast.AsyncFunctionDef,ast.ClassDef\\n\",\n    \"    d = {}\\n\",\n    \"    for tree in ast.parse(txt).body:\\n\",\n    \"        if isinstance(tree, _def_types):\\n\",\n    \"            for t in L(patch_name(tree)): d[t] = f\\\"{tree.lineno}-L{tree.end_lineno}\\\"\\n\",\n    \"        if isinstance(tree, ast.ClassDef): d.update({tree.name+\\\".\\\"+t2.name: f\\\"{t2.lineno}-L{t2.end_lineno}\\\" for t2 in tree.body if isinstance(t2, _def_types)})\\n\",\n    
\"    return d\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"00ceebf0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'FixtureClass': '5-L22', 'FixtureClass.__init__': '6-L8', 'FixtureClass.class_method': '11-L13', 'FixtureClass.static_method': '16-L18', 'FixtureClass.instance_method': '20-L22'}\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"module_code = '''__all__ = ['FixtureClass']\\n\",\n    \"\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"class FixtureClass:    \\n\",\n    \"    def __init__(self, attribute):\\n\",\n    \"        \\\"\\\"\\\"__init__ documentation\\\"\\\"\\\"\\n\",\n    \"        self.attribute = attribute\\n\",\n    \"    \\n\",\n    \"    @classmethod\\n\",\n    \"    def class_method(cls):\\n\",\n    \"        \\\"\\\"\\\"class_method documentation\\\"\\\"\\\"\\n\",\n    \"        return cls.class_variable\\n\",\n    \"    \\n\",\n    \"    @staticmethod\\n\",\n    \"    def static_method():\\n\",\n    \"        \\\"\\\"\\\"static_method documentation\\\"\\\"\\\"\\n\",\n    \"        return \\\"This is a static method\\\"\\n\",\n    \"    \\n\",\n    \"    def instance_method(self):\\n\",\n    \"        \\\"\\\"\\\"instance_method documentation\\\"\\\"\\\"\\n\",\n    \"        return \\\"This is an instance method\\\"\\n\",\n    \"'''\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    my_package = f\\\"mypackage_{random.randint(0, 1000)}\\\"\\n\",\n    \"    module_name = \\\"mymodule\\\"\\n\",\n    \"\\n\",\n    \"    my_package_path = Path(d) / my_package\\n\",\n    \"    my_package_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    file_path = my_package_path / f\\\"{module_name}.py\\\"\\n\",\n    \"\\n\",\n    \"    with open(file_path, \\\"w\\\", encoding=\\\"utf-8\\\") as file:\\n\",\n    \"        file.write(module_code)\\n\",\n    \"\\n\",\n    
\"    with open((my_package_path / \\\"__init__.py\\\"), \\\"w\\\") as f:\\n\",\n    \"        f.write('__version__ = \\\"0.0.1\\\"')\\n\",\n    \"\\n\",\n    \"    with add_tmp_path_to_sys_path(d):\\n\",\n    \"        actual = _get_exps(f\\\"{my_package}/{module_name}.py\\\")\\n\",\n    \"        expected = {\\n\",\n    \"            \\\"FixtureClass\\\": \\\"5-L22\\\",\\n\",\n    \"            \\\"FixtureClass.__init__\\\": \\\"6-L8\\\",\\n\",\n    \"            \\\"FixtureClass.class_method\\\": \\\"11-L13\\\",\\n\",\n    \"            \\\"FixtureClass.static_method\\\": \\\"16-L18\\\",\\n\",\n    \"            \\\"FixtureClass.instance_method\\\": \\\"20-L22\\\",\\n\",\n    \"        }\\n\",\n    \"        print(actual)\\n\",\n    \"        assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c5a34e8e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _lineno(sym: str, fname: str) -> Optional[str]:\\n\",\n    \"    return _get_exps(fname).get(sym, None) if fname else None\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@lru_cache(None)\\n\",\n    \"class CustomNbdevLookup(NbdevLookup.__wrapped__): # type: ignore\\n\",\n    \"    def __init__(\\n\",\n    \"        self, strip_libs: Optional[str] = None, incl_libs: Optional[str] = None, skip_mods: Optional[str] = None\\n\",\n    \"    ):\\n\",\n    \"        super().__init__(strip_libs, incl_libs, skip_mods)\\n\",\n    \"\\n\",\n    \"    def code(self, sym: str) -> Optional[str]:\\n\",\n    \"        \\\"Link to source code for `sym`\\\"\\n\",\n    \"        res = self[sym]\\n\",\n    \"        if not isinstance(res, tuple):\\n\",\n    \"            return None\\n\",\n    \"        _, py, gh = res\\n\",\n    \"        line = _lineno(sym, py)\\n\",\n    \"        return f\\\"{gh}#L{line}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   
\"id\": \"5c94972e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L177-L427\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"actual = CustomNbdevLookup().code('FastKafka')\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"pattern = r'#L\\\\d+-L\\\\d+'\\n\",\n    \"assert re.search(pattern, actual)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4300c760\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"@contextmanager\\n\",\n    \"def mock_custom_nbdev_lookup():\\n\",\n    \"    with patch('__main__.CustomNbdevLookup') as MockCustomNbdevLookup:\\n\",\n    \"        instance = MockCustomNbdevLookup.return_value\\n\",\n    \"        instance.code.return_value = \\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\"\\n\",\n    \"        yield\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = CustomNbdevLookup().code(\\\"some_symbol_qualname\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"    expected = \\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\"\\n\",\n    \"    assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1f1e8563\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_symbol_source_link(symbol: Type, lib_version: str) -> str:\\n\",\n    \"    \\\"\\\"\\\"Returns the source code link for a given symbol.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"  
      symbol: The symbol to get the source code link for.\\n\",\n    \"        lib_version: The current version of the library.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The source code link for the symbol.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    symbol = symbol.fget if isinstance(symbol, property) else symbol\\n\",\n    \"    source_link = CustomNbdevLookup().code(f\\\"{symbol.__qualname__}\\\")\\n\",\n    \"    \\n\",\n    \"    if source_link is None:\\n\",\n    \"        return ''\\n\",\n    \"    \\n\",\n    \"    href = (\\n\",\n    \"        source_link.replace(\\\"/blob/main/\\\", f\\\"/blob/{lib_version}/\\\")\\n\",\n    \"        if lib_version.replace(\\\".\\\", \\\"\\\").isdigit()\\n\",\n    \"        else source_link\\n\",\n    \"    )\\n\",\n    \"    return f'<a href=\\\"{href}\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"880e2b1e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.7.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.7.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.7.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"class FixtureClass:\\n\",\n    \"    def __init__(self, attribute):\\n\",\n    \"        self.attribute = attribute\\n\",\n    \"    \\n\",\n    \"    @property\\n\",\n    \"    def property_attribute(self):\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    
\"    @classmethod\\n\",\n    \"    def class_method(cls):\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"    def instance_method(self):\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    members = getmembers(FixtureClass, lambda a : isfunction(a) or ismethod(a))\\n\",\n    \"    lib_version = \\\"0.7.0\\\"\\n\",\n    \"    for m in members:\\n\",\n    \"        actual = _get_symbol_source_link(m[1], lib_version)\\n\",\n    \"        print(actual)\\n\",\n    \"        expected = '''<a href=\\\"https://github.com/airtai/fastkafka/blob/0.7.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>'''\\n\",\n    \"        assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"453ec902\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    members = getmembers(FixtureClass, lambda a : isfunction(a) or ismethod(a))\\n\",\n    \"    lib_version = \\\"0.8.0rc0\\\"\\n\",\n    \"    for m in members:\\n\",\n    \"        actual = _get_symbol_source_link(m[1], lib_version)\\n\",\n    \"        print(actual)\\n\",\n    \"        expected = '''<a 
href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>'''\\n\",\n    \"        assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d0c27057\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    members = getmembers(FixtureClass, lambda a : isfunction(a) or ismethod(a))\\n\",\n    \"    lib_version = \\\"dev\\\"\\n\",\n    \"    for m in members:\\n\",\n    \"        actual = _get_symbol_source_link(m[1], lib_version)\\n\",\n    \"        print(actual)\\n\",\n    \"        expected = '''<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>'''\\n\",\n    \"        assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9e14309e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_method_type(symbol: Type) -> str:\\n\",\n    \"    try:\\n\",\n    \"        source = getsource(symbol).strip()\\n\",\n    \"    except (TypeError, 
OSError) as e:\\n\",\n    \"        return \\\"\\\"\\n\",\n    \"\\n\",\n    \"    first_line = source.split(\\\"\\\\n\\\")[0]\\n\",\n    \"    return (\\n\",\n    \"        f\\\"{first_line}\\\\n\\\"\\n\",\n    \"        if first_line\\n\",\n    \"        in [\\\"@abstractmethod\\\", \\\"@staticmethod\\\", \\\"@classmethod\\\", \\\"@property\\\"]\\n\",\n    \"        else \\\"\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_symbol_definition(symbol: Type, header_level: int, lib_version: str) -> str:\\n\",\n    \"    \\\"\\\"\\\"Return the definition of a given symbol.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        symbol: A function or method object to get the definition for.\\n\",\n    \"        header_level: The level of the markdown header to append.\\n\",\n    \"        lib_version: The current version of the library.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A string representing the function definition\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if isclass(symbol):\\n\",\n    \"        return f\\\"{'#'*(header_level - 1)} {symbol.__module__}.{symbol.__name__} {{#{symbol.__module__}.{symbol.__name__}}}\\\\n\\\\n{_get_symbol_source_link(symbol, lib_version)}\\\\n\\\\n\\\"\\n\",\n    \"\\n\",\n    \"    if isinstance(symbol, property):\\n\",\n    \"        symbol = symbol.fget\\n\",\n    \"\\n\",\n    \"    symbol_anchor = (\\n\",\n    \"        f\\\"{'#' * header_level} {symbol.__name__}\\\"\\n\",\n    \"        + f\\\" {{#{symbol.__module__}.{'.'.join([component.strip('_') for component in symbol.__qualname__.rsplit('.', 1)])}}}\\\\n\\\\n\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    link_to_source = f\\\"{_get_symbol_source_link(symbol, lib_version)}\\\\n\\\\n\\\"\\n\",\n    \"\\n\",\n    \"    _signature = signature(symbol)\\n\",\n    \"    parameters = _get_parameters(_signature)\\n\",\n    \"    symbol_definition = f\\\"```py\\\\n{_get_method_type(symbol)}{_format_symbol_definition(symbol, 
parameters)}```\\\\n\\\"\\n\",\n    \"    return symbol_anchor + link_to_source + symbol_definition\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"649653b0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"#### __main__.MyClass {#__main__.MyClass}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"'##### __init__ {#__main__.MyClass.init}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n__init__(\\\\n    self\\\\n)\\\\n```\\\\n'\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"##### class_method {#__main__.MyClass.class_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@classmethod\\n\",\n      \"class_method(\\n\",\n      \"    a\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"##### abstract_method {#__main__.MyClass.abstract_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@abstractmethod\\n\",\n      \"abstract_method(\\n\",\n      \"    self, xyz\\n\",\n  
    \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"##### static_method {#__main__.MyClass.static_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@staticmethod\\n\",\n      \"static_method(\\n\",\n      \"    x\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"##### instance_method {#__main__.MyClass.instance_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"instance_method(\\n\",\n      \"    self, a, b, c\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"##### property_attribute {#__main__.MyClass.property_attribute}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@property\\n\",\n      \"property_attribute(\\n\",\n      \"    self, a\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_decorator(func):\\n\",\n    \"    @functools.wraps(func)\\n\",\n    \"    def wrapped_func():\\n\",\n    \"        func()\\n\",\n    \"    return wrapped_func\\n\",\n    \"\\n\",\n    \"class MyClass:\\n\",\n    \"    attribute = \\\"Some Attribute\\\"\\n\",\n    \"    \\n\",\n    \"    def __init__(self):\\n\",\n    \"        \\\"\\\"\\\"__init__ documentation\\\"\\\"\\\"\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    @fixture_decorator\\n\",\n    \"    def instance_method(self, a, b, c):\\n\",\n    \"        \\\"\\\"\\\"instance_method 
documentation\\\"\\\"\\\"\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    @property\\n\",\n    \"    def property_attribute(self, a):\\n\",\n    \"        \\\"\\\"\\\"property_attribute documentation\\\"\\\"\\\"\\n\",\n    \"        return self.attribute\\n\",\n    \"\\n\",\n    \"    @classmethod\\n\",\n    \"    @fixture_decorator\\n\",\n    \"    def class_method(cls, a):\\n\",\n    \"        \\\"\\\"\\\"class_method documentation\\\"\\\"\\\"\\n\",\n    \"        return cls.class_variable\\n\",\n    \"\\n\",\n    \"    @staticmethod\\n\",\n    \"    def static_method(x):\\n\",\n    \"        \\\"\\\"\\\"static_method documentation\\\"\\\"\\\"\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    @abstractmethod\\n\",\n    \"    def abstract_method(self, xyz):\\n\",\n    \"        \\\"\\\"\\\"abstract_method documentation\\\"\\\"\\\"\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_symbol_definition(MyClass, 5, \\\"dev\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == '#### __main__.MyClass {#__main__.MyClass}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n'\\n\",\n    \"    \\n\",\n    \"    actual = _get_symbol_definition(MyClass.__init__, 5, \\\"1.0.0\\\")\\n\",\n    \"    display(actual)\\n\",\n    \"    assert (\\n\",\n    \"        actual\\n\",\n    \"        == '''##### __init__ {#__main__.MyClass.init}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n__init__(\\\\n    self\\\\n)\\\\n```\\\\n'''\\n\",\n    \"    )\\n\",\n    \"    \\n\",\n    \"    \\n\",\n    \"    actual = _get_symbol_definition(MyClass.class_method, 5, \\\"1.0.0\\\")\\n\",\n    \"    
print(actual)\\n\",\n    \"    assert (\\n\",\n    \"        actual\\n\",\n    \"        == '''##### class_method {#__main__.MyClass.class_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@classmethod\\\\nclass_method(\\\\n    a\\\\n)\\\\n```\\\\n'''\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    actual = _get_symbol_definition(MyClass.abstract_method, 5, \\\"0.2.0rc0\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"    assert (\\n\",\n    \"        actual\\n\",\n    \"        == '''##### abstract_method {#__main__.MyClass.abstract_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@abstractmethod\\\\nabstract_method(\\\\n    self, xyz\\\\n)\\\\n```\\\\n'''\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    actual = _get_symbol_definition(MyClass.static_method, 5, \\\"0.7.0dev\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"    assert (\\n\",\n    \"        actual\\n\",\n    \"        == '''##### static_method {#__main__.MyClass.static_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@staticmethod\\\\nstatic_method(\\\\n    x\\\\n)\\\\n```\\\\n'''\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    actual = _get_symbol_definition(MyClass.instance_method, 5, \\\"1.0.1-rc.0.1\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"    assert (\\n\",\n    \"        actual\\n\",\n    \"        == '''##### instance_method {#__main__.MyClass.instance_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" 
target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\ninstance_method(\\\\n    self, a, b, c\\\\n)\\\\n```\\\\n'''\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    actual = _get_symbol_definition(MyClass.property_attribute, 5, \\\"1.0\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"    assert (\\n\",\n    \"        actual\\n\",\n    \"        == '''##### property_attribute {#__main__.MyClass.property_attribute}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@property\\\\nproperty_attribute(\\\\n    self, a\\\\n)\\\\n```\\\\n'''\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"55f76f70\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"##### fixture_function {#__main__.fixture_function}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.7.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"fixture_function()\\n\",\n      \"```\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function() -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_symbol_definition(fixture_function, 5, \\\"0.7.0\\\")\\n\",\n    \"    expected = \\\"\\\"\\\"    ##### fixture_function {#__main__.fixture_function}\\n\",\n    \"\\n\",\n    \"    <a href=\\\"https://github.com/airtai/fastkafka/blob/0.7.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n    \"\\n\",\n    \"    ```py\\n\",\n    \"    fixture_function()\\n\",\n    \"    ```\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    
\"\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == textwrap.dedent(expected)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a667280a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"### fixture_function {#__main__.fixture_function}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"fixture_function(\\n\",\n      \"    arg_1, arg_2\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"TestCallable = Callable[[BaseModel], Union[Awaitable[None], None]]\\n\",\n    \"\\n\",\n    \"def fixture_function(arg_1: str, arg_2) -> TestCallable:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_symbol_definition(fixture_function, 3, \\\"1.0\\\")\\n\",\n    \"    expected = \\\"\\\"\\\"    ### fixture_function {#__main__.fixture_function}\\n\",\n    \"    \\n\",\n    \"    <a href=\\\"https://github.com/airtai/fastkafka/blob/1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n    \"\\n\",\n    \"    ```py\\n\",\n    \"    fixture_function(\\n\",\n    \"        arg_1, arg_2\\n\",\n    \"    )\\n\",\n    \"    ```\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == textwrap.dedent(expected)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2e084d12\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"### fixture_function 
{#__main__.fixture_function}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"fixture_function(\\n\",\n      \"    api_version='auto',\\n\",\n      \"    acks=<object object>,\\n\",\n      \"    max_poll_interval_ms=300000,\\n\",\n      \"    partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\\n\",\n      \"    isolation_level='read_uncommitted',\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"_object = object()\\n\",\n    \"\\n\",\n    \"def fixture_function(\\n\",\n    \"    api_version=\\\"auto\\\",\\n\",\n    \"    acks=_object,\\n\",\n    \"    max_poll_interval_ms=300000,\\n\",\n    \"    partition_assignment_strategy=(RoundRobinPartitionAssignor,),\\n\",\n    \"    isolation_level=\\\"read_uncommitted\\\") -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"\\n\",\n    \"    actual = _get_symbol_definition(fixture_function, 3, \\\"dev\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual is not None\\n\",\n    \"    assert '''<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>''' in actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"184e1deb\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"# fixture_function {#__main__.fixture_function}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View 
source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"fixture_function(\\n\",\n      \"    arg_1,\\n\",\n      \"    arg_2='default_string',\\n\",\n      \"    arg_3={},\\n\",\n      \"    arg_4=None,\\n\",\n      \"    arg_5=(1, 'string', 2.0),\\n\",\n      \"    arg_6=[1, 'string'],\\n\",\n      \"    arg_7={1, 2, 3},\\n\",\n      \"    arg_8='string',\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(\\n\",\n    \"    arg_1: int,\\n\",\n    \"    arg_2: str = \\\"default_string\\\",\\n\",\n    \"    arg_3: Dict[str, int] = {},\\n\",\n    \"    arg_4: Optional[float] = None,\\n\",\n    \"    arg_5: Tuple[int, str, float] = (1, \\\"string\\\", 2.0),\\n\",\n    \"    arg_6: List[Union[int, str]] = [1, \\\"string\\\"],\\n\",\n    \"    arg_7: Set[int] = {1, 2, 3},\\n\",\n    \"    arg_8: Union[int, str] = \\\"string\\\",\\n\",\n    \") -> None:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_symbol_definition(fixture_function, 1, \\\"1.0\\\")\\n\",\n    \"    expected = \\\"\\\"\\\"    # fixture_function {#__main__.fixture_function}\\n\",\n    \"    \\n\",\n    \"    <a href=\\\"https://github.com/airtai/fastkafka/blob/1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n    \"\\n\",\n    \"    ```py\\n\",\n    \"    fixture_function(\\n\",\n    \"        arg_1,\\n\",\n    \"        arg_2='default_string',\\n\",\n    \"        arg_3={},\\n\",\n    \"        arg_4=None,\\n\",\n    \"        arg_5=(1, 'string', 2.0),\\n\",\n    \"        arg_6=[1, 'string'],\\n\",\n    \"        arg_7={1, 2, 3},\\n\",\n    \"        arg_8='string',\\n\",\n    \"    )\\n\",\n    \"    ```\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == textwrap.dedent(expected)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"4e19a478\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _is_method(symbol: Type) -> bool:\\n\",\n    \"    \\\"\\\"\\\"Check if the given symbol is a method.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        symbol: A function or method object to check.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A boolean indicating whether the symbol is a method.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return (\\n\",\n    \"        ismethod(symbol)\\n\",\n    \"        or isfunction(symbol)\\n\",\n    \"        or isinstance(symbol, property)\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"55e4666d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert _is_method(MyClass.instance_method)\\n\",\n    \"assert _is_method(MyClass.static_method)\\n\",\n    \"assert _is_method(MyClass.class_method)\\n\",\n    \"assert _is_method(MyClass.abstract_method)\\n\",\n    \"assert _is_method(MyClass.property_attribute)\\n\",\n    \"assert not _is_method(MyClass.attribute)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"60e81719\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _get_formatted_docstring_for_symbol(\\n\",\n    \"    symbol: Type,\\n\",\n    \"    lib_version: str,\\n\",\n    \"    header_level: int = 2\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Recursively parses and get formatted docstring of a symbol.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        symbol: A Python class or function object to parse the docstring for.\\n\",\n    \"        lib_version: The current version of the library.\\n\",\n    \"        header_level: The level of the markdown header to append.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"   
     A formatted docstring of the symbol and its members.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    def traverse(symbol: Type, contents: str, header_level: int, lib_version: str) -> str:\\n\",\n    \"        \\\"\\\"\\\"Recursively traverse the members of a symbol and append their docstrings to the provided contents string.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            symbol: A Python class or function object to parse the docstring for.\\n\",\n    \"            contents: The current formatted docstrings.\\n\",\n    \"            header_level: The level of the markdown header to append.\\n\",\n    \"            lib_version: The current version of the library.\\n\",\n    \"\\n\",\n    \"        Returns:\\n\",\n    \"            The updated formatted docstrings.\\n\",\n    \"\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        for x, y in getmembers(symbol):\\n\",\n    \"            if not x.startswith(\\\"_\\\") or x == \\\"__init__\\\":\\n\",\n    \"                if _is_method(y):\\n\",\n    \"                    contents += f\\\"{_get_symbol_definition(y, header_level, lib_version)}\\\\n{_docstring_to_markdown(y)}\\\"\\n\",\n    \"                elif isclass(y) and not x.startswith(\\\"_\\\"):\\n\",\n    \"                    contents += f\\\"{_get_symbol_definition(y, header_level+1, lib_version)}\\\\n{_docstring_to_markdown(y)}\\\"\\n\",\n    \"                    contents = traverse(y, contents, header_level+1, lib_version)\\n\",\n    \"        return contents\\n\",\n    \"\\n\",\n    \"    contents = f\\\"{_get_symbol_definition(symbol, header_level+1, lib_version)}\\\\n{_docstring_to_markdown(symbol)}\\\"\\n\",\n    \"    if isclass(symbol):\\n\",\n    \"        contents = traverse(symbol, contents, header_level+1, lib_version)\\n\",\n    \"    return contents\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"28dfdda9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n    
 \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"## __main__.FixtureClass {#__main__.FixtureClass}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"### __init__ {#__main__.FixtureClass.init}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"__init__(\\n\",\n      \"    self, attribute\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"### abstract_method {#__main__.FixtureClass.abstract_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@abstractmethod\\n\",\n      \"abstract_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"### class_method {#__main__.FixtureClass.class_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@classmethod\\n\",\n      \"class_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"### instance_method {#__main__.FixtureClass.instance_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"instance_method(\\n\",\n      \"    
self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"### property_attribute {#__main__.FixtureClass.property_attribute}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@property\\n\",\n      \"property_attribute(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"### static_method {#__main__.FixtureClass.static_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@staticmethod\\n\",\n      \"static_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"class FixtureClass:\\n\",\n    \"    def __init__(self, attribute):\\n\",\n    \"        self.attribute = attribute\\n\",\n    \"        \\n\",\n    \"    @property\\n\",\n    \"    def property_attribute(self):\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"    @classmethod\\n\",\n    \"    def class_method(cls):\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"    @staticmethod\\n\",\n    \"    def static_method():\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"    def instance_method(self):\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"    @abstractmethod\\n\",\n    \"    def abstract_method(self):\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_formatted_docstring_for_symbol(FixtureClass, \\\"0.1.0\\\")\\n\",\n    \"    expected = '''## __main__.FixtureClass {#__main__.FixtureClass}\\\\n\\\\n<a 
href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n\\\\n### __init__ {#__main__.FixtureClass.init}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n__init__(\\\\n    self, attribute\\\\n)\\\\n```\\\\n\\\\n### abstract_method {#__main__.FixtureClass.abstract_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@abstractmethod\\\\nabstract_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\n### class_method {#__main__.FixtureClass.class_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@classmethod\\\\nclass_method()\\\\n```\\\\n\\\\n### instance_method {#__main__.FixtureClass.instance_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\ninstance_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\n### property_attribute {#__main__.FixtureClass.property_attribute}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@property\\\\nproperty_attribute(\\\\n    self\\\\n)\\\\n```\\\\n\\\\n### static_method {#__main__.FixtureClass.static_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View 
source</a>\\\\n\\\\n```py\\\\n@staticmethod\\\\nstatic_method()\\\\n```\\\\n\\\\n'''\\n\",\n    \"    print(actual)\\n\",\n    \"    \\n\",\n    \"    assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8aa4c21e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"## __main__.FixtureClass {#__main__.FixtureClass}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"### __main__.NestedClass {#__main__.NestedClass}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"NestedClass documentation\\n\",\n      \"\\n\",\n      \"#### __main__.NestedNestedClass {#__main__.NestedNestedClass}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"NestedNestedClass documentation\\n\",\n      \"\\n\",\n      \"##### nested_nested_method {#__main__.FixtureClass.NestedClass.NestedNestedClass.nested_nested_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"nested_nested_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"nested_nested_method documentation\\n\",\n      \"\\n\",\n      \"#### 
nested_method {#__main__.FixtureClass.NestedClass.nested_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"nested_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"nested_method documentation\\n\",\n      \"\\n\",\n      \"### __init__ {#__main__.FixtureClass.init}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"__init__(\\n\",\n      \"    self, attribute\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"__init__ documentation\\n\",\n      \"\\n\",\n      \"### abstract_method {#__main__.FixtureClass.abstract_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@abstractmethod\\n\",\n      \"abstract_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"abstract_method documentation\\n\",\n      \"\\n\",\n      \"### class_method {#__main__.FixtureClass.class_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@classmethod\\n\",\n      \"class_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"class_method documentation\\n\",\n      \"\\n\",\n      \"### instance_method {#__main__.FixtureClass.instance_method}\\n\",\n      
\"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"instance_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"instance_method documentation\\n\",\n      \"\\n\",\n      \"### property_attribute {#__main__.FixtureClass.property_attribute}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@property\\n\",\n      \"property_attribute(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"property_attribute documentation\\n\",\n      \"\\n\",\n      \"### static_method {#__main__.FixtureClass.static_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@staticmethod\\n\",\n      \"static_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"static_method documentation\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"class FixtureClass:\\n\",\n    \"    class_variable = 10\\n\",\n    \"    \\n\",\n    \"    def __init__(self, attribute):\\n\",\n    \"        \\\"\\\"\\\"__init__ documentation\\\"\\\"\\\"\\n\",\n    \"        self.attribute = attribute\\n\",\n    \"    \\n\",\n    \"    @property\\n\",\n    \"    def property_attribute(self):\\n\",\n    \"        \\\"\\\"\\\"property_attribute documentation\\\"\\\"\\\"\\n\",\n    \"        return self.attribute\\n\",\n    \"    \\n\",\n    \"    @classmethod\\n\",\n    \"    def class_method(cls):\\n\",\n    \" 
       \\\"\\\"\\\"class_method documentation\\\"\\\"\\\"\\n\",\n    \"        return cls.class_variable\\n\",\n    \"    \\n\",\n    \"    @staticmethod\\n\",\n    \"    @fixture_decorator\\n\",\n    \"    def static_method():\\n\",\n    \"        \\\"\\\"\\\"static_method documentation\\\"\\\"\\\"\\n\",\n    \"        return \\\"This is a static method\\\"\\n\",\n    \"    \\n\",\n    \"    @fixture_decorator\\n\",\n    \"    def instance_method(self):\\n\",\n    \"        \\\"\\\"\\\"instance_method documentation\\\"\\\"\\\"\\n\",\n    \"        return \\\"This is an instance method\\\"\\n\",\n    \"    \\n\",\n    \"    def __str__(self):\\n\",\n    \"        \\\"\\\"\\\"__str__ documentation\\\"\\\"\\\"\\n\",\n    \"        return f\\\"MyClass instance with attribute: {self.attribute}\\\"\\n\",\n    \"    \\n\",\n    \"    @abstractmethod\\n\",\n    \"    def abstract_method(self):\\n\",\n    \"        \\\"\\\"\\\"abstract_method documentation\\\"\\\"\\\"\\n\",\n    \"        pass\\n\",\n    \"    \\n\",\n    \"    class NestedClass:\\n\",\n    \"        \\\"\\\"\\\"NestedClass documentation\\\"\\\"\\\"\\n\",\n    \"        def nested_method(self):\\n\",\n    \"            \\\"\\\"\\\"nested_method documentation\\\"\\\"\\\"\\n\",\n    \"            return \\\"This is a method in the nested class\\\"\\n\",\n    \"        \\n\",\n    \"        class NestedNestedClass:\\n\",\n    \"            \\\"\\\"\\\"NestedNestedClass documentation\\\"\\\"\\\"\\n\",\n    \"            \\n\",\n    \"            def nested_nested_method(self):\\n\",\n    \"                \\\"\\\"\\\"nested_nested_method documentation\\\"\\\"\\\"\\n\",\n    \"                return \\\"This is a method in the nested_nested class\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_formatted_docstring_for_symbol(FixtureClass, \\\"0.1.0\\\")\\n\",\n    \"    expected = '''## __main__.FixtureClass 
{#__main__.FixtureClass}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n\\\\n### __main__.NestedClass {#__main__.NestedClass}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n\\\\nNestedClass documentation\\\\n\\\\n#### __main__.NestedNestedClass {#__main__.NestedNestedClass}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n\\\\nNestedNestedClass documentation\\\\n\\\\n##### nested_nested_method {#__main__.FixtureClass.NestedClass.NestedNestedClass.nested_nested_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\nnested_nested_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\nnested_nested_method documentation\\\\n\\\\n#### nested_method {#__main__.FixtureClass.NestedClass.nested_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\nnested_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\nnested_method documentation\\\\n\\\\n### __init__ {#__main__.FixtureClass.init}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n__init__(\\\\n    self, attribute\\\\n)\\\\n```\\\\n\\\\n__init__ documentation\\\\n\\\\n### abstract_method {#__main__.FixtureClass.abstract_method}\\\\n\\\\n<a 
href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@abstractmethod\\\\nabstract_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\nabstract_method documentation\\\\n\\\\n### class_method {#__main__.FixtureClass.class_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@classmethod\\\\nclass_method()\\\\n```\\\\n\\\\nclass_method documentation\\\\n\\\\n### instance_method {#__main__.FixtureClass.instance_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\ninstance_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\ninstance_method documentation\\\\n\\\\n### property_attribute {#__main__.FixtureClass.property_attribute}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@property\\\\nproperty_attribute(\\\\n    self\\\\n)\\\\n```\\\\n\\\\nproperty_attribute documentation\\\\n\\\\n### static_method {#__main__.FixtureClass.static_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@staticmethod\\\\nstatic_method()\\\\n```\\\\n\\\\nstatic_method documentation\\\\n\\\\n'''\\n\",\n    \"    print(actual)\\n\",\n    \"    \\n\",\n    \"    assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bb2ae51d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     
\"output_type\": \"stream\",\n     \"text\": [\n      \"### fixture_function {#__main__.fixture_function}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"fixture_function(\\n\",\n      \"    arg_1, arg_2, arg_3, arg_4=None\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"This is a one line description for the function\\n\",\n      \"\\n\",\n      \"**Parameters**:\\n\",\n      \"\\n\",\n      \"|  Name | Type | Description | Default |\\n\",\n      \"|---|---|---|---|\\n\",\n      \"| `arg_1` | `str` | Argument 1 | *required* |\\n\",\n      \"| `arg_2` | `Union[List[str], str]` | Argument 2 | *required* |\\n\",\n      \"| `arg_3` | `Optional[int]` | Argument 3 | *required* |\\n\",\n      \"| `arg_4` | `Optional[str]` | Argument 4 | `None` |\\n\",\n      \"\\n\",\n      \"**Returns**:\\n\",\n      \"\\n\",\n      \"|  Type | Description |\\n\",\n      \"|---|---|\\n\",\n      \"| `str` | The concatinated string |\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(\\n\",\n    \"    arg_1: str,\\n\",\n    \"    arg_2: Union[List[str], str],\\n\",\n    \"    arg_3: Optional[int],\\n\",\n    \"    arg_4: Optional[str] = None,\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"This is a one line description for the function\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        arg_1: Argument 1\\n\",\n    \"        arg_2: Argument 2\\n\",\n    \"        arg_3: Argument 3\\n\",\n    \"        arg_4: Argument 4\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The concatinated string\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"_optional_str_type = \\\"Union[str, NoneType]\\\" if f\\\"{sys.version_info.major}.{sys.version_info.minor}\\\" == 
\\\"3.8\\\" else \\\"Optional[str]\\\"\\n\",\n    \"_optional_int_type = \\\"Union[int, NoneType]\\\" if f\\\"{sys.version_info.major}.{sys.version_info.minor}\\\" == \\\"3.8\\\" else \\\"Optional[int]\\\"\\n\",\n    \"expected = \\\"### fixture_function {#__main__.fixture_function}\\\"+f\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n    \"\\n\",\n    \"```py\\n\",\n    \"fixture_function(\\n\",\n    \"    arg_1, arg_2, arg_3, arg_4=None\\n\",\n    \")\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This is a one line description for the function\\n\",\n    \"\\n\",\n    \"**Parameters**:\\n\",\n    \"\\n\",\n    \"|  Name | Type | Description | Default |\\n\",\n    \"|---|---|---|---|\\n\",\n    \"| `arg_1` | `str` | Argument 1 | *required* |\\n\",\n    \"| `arg_2` | `Union[List[str], str]` | Argument 2 | *required* |\\n\",\n    \"| `arg_3` | `{_optional_int_type}` | Argument 3 | *required* |\\n\",\n    \"| `arg_4` | `{_optional_str_type}` | Argument 4 | `None` |\\n\",\n    \"\\n\",\n    \"**Returns**:\\n\",\n    \"\\n\",\n    \"|  Type | Description |\\n\",\n    \"|---|---|\\n\",\n    \"| `str` | The concatinated string |\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_formatted_docstring_for_symbol(fixture_function, \\\"0.1.1rc0\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"\\n\",\n    \"    assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"949b3aa3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"### fixture_function {#__main__.fixture_function}\\n\",\n      \"\\n\",\n      \"<a 
href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"fixture_function(\\n\",\n      \"    arg_1, arg_2, arg_3=12\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def fixture_function(\\n\",\n    \"    arg_1: str,\\n\",\n    \"    arg_2: Union[List[str], str],\\n\",\n    \"    arg_3: Optional[int] = 12,\\n\",\n    \") -> str:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"expected = \\\"### fixture_function {#__main__.fixture_function}\\\"+f\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n    \"\\n\",\n    \"```py\\n\",\n    \"fixture_function(\\n\",\n    \"    arg_1, arg_2, arg_3=12\\n\",\n    \")\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with mock_custom_nbdev_lookup():\\n\",\n    \"    actual = _get_formatted_docstring_for_symbol(fixture_function, \\\"0.1.1rc0\\\")\\n\",\n    \"    print(actual)\\n\",\n    \"\\n\",\n    \"    assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ae47ed11\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _convert_html_style_attribute_to_jsx(contents: str) -> str:\\n\",\n    \"    \\\"\\\"\\\"Converts the inline style attributes in an HTML string to JSX compatible format.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        contents: A string containing an HTML document or fragment.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A string with inline style attributes converted to JSX compatible format.\\n\",\n    \"    
\\\"\\\"\\\"\\n\",\n    \"    style_regex = re.compile(r'style=\\\"(.+?)\\\"')\\n\",\n    \"    style_matches = style_regex.findall(contents)\\n\",\n    \"\\n\",\n    \"    for style_match in style_matches:\\n\",\n    \"        style_dict = {}\\n\",\n    \"        styles = style_match.split(\\\";\\\")\\n\",\n    \"        for style in styles:\\n\",\n    \"            key_value = style.split(\\\":\\\")\\n\",\n    \"            if len(key_value) == 2:\\n\",\n    \"                key = re.sub(\\n\",\n    \"                    r\\\"-(.)\\\", lambda m: m.group(1).upper(), key_value[0].strip()\\n\",\n    \"                )\\n\",\n    \"                value = key_value[1].strip().replace(\\\"'\\\", '\\\"')\\n\",\n    \"                style_dict[key] = value\\n\",\n    \"        replacement = \\\"style={{\\\"\\n\",\n    \"        for key, value in style_dict.items():\\n\",\n    \"            replacement += f\\\"{key}: '{value}', \\\"\\n\",\n    \"        replacement = replacement[:-2] + \\\"}}\\\"\\n\",\n    \"        contents = contents.replace(f'style=\\\"{style_match}\\\"', replacement)\\n\",\n    \"\\n\",\n    \"    return contents\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bcaa9e96\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"************************************************************************************************************************\\n\",\n      \"<a\\n\",\n      \"href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/test_dependencies.py#L28\\\"\\n\",\n      \"target=\\\"_blank\\\" style={{float: 'right', fontSize: 'smaller'}}>source</a> some text goes here <a\\n\",\n      \"href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/test_dependencies.py#L28\\\"\\n\",\n      \"target=\\\"_blank\\\" style={{float: 'right', fontSize: 'smaller'}}>source</a>\\n\",\n      
\"************************************************************************************************************************\\n\",\n      \"<span style={{color: 'red'}}>Test</span>\\n\",\n      \"************************************************************************************************************************\\n\",\n      \"<div style={{backgroundColor: 'blue', border: '1px solid black'}}>Test</div>\\n\",\n      \"************************************************************************************************************************\\n\",\n      \"<span style={{fontSize: '1.2rem'}}>Test</span>\\n\",\n      \"************************************************************************************************************************\\n\",\n      \"<pre style={{whiteSpace: 'pre', overflowX: 'auto', lineHeight: 'normal', fontFamily: 'Menlo,\\\"DejaVu Sans Mono\\\",consolas,\\\"Courier New\\\",monospace'}}></pre>\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixtures = [\\n\",\n    \"    {\\n\",\n    \"        \\\"input\\\": \\\"\\\"\\\"<a\\n\",\n    \"href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/test_dependencies.py#L28\\\"\\n\",\n    \"target=\\\"_blank\\\" style=\\\"float:right; font-size:smaller\\\">source</a> some text goes here <a\\n\",\n    \"href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/test_dependencies.py#L28\\\"\\n\",\n    \"target=\\\"_blank\\\" style=\\\"float:right; font-size:smaller\\\">source</a>\\\"\\\"\\\",\\n\",\n    \"        \\\"expected\\\": \\\"\\\"\\\"<a\\n\",\n    \"href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/test_dependencies.py#L28\\\"\\n\",\n    \"target=\\\"_blank\\\" style={{float: 'right', fontSize: 'smaller'}}>source</a> some text goes here <a\\n\",\n    \"href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/test_dependencies.py#L28\\\"\\n\",\n    \"target=\\\"_blank\\\" style={{float: 'right', fontSize: 
'smaller'}}>source</a>\\\"\\\"\\\",\\n\",\n    \"    },\\n\",\n    \"    {\\n\",\n    \"        \\\"input\\\": '<span style=\\\"color: red;\\\">Test</span>',\\n\",\n    \"        \\\"expected\\\": \\\"<span style={{color: 'red'}}>Test</span>\\\",\\n\",\n    \"    },\\n\",\n    \"    {\\n\",\n    \"        \\\"input\\\": '<div style=\\\"background-color: blue; border: 1px solid black;\\\">Test</div>',\\n\",\n    \"        \\\"expected\\\": \\\"<div style={{backgroundColor: 'blue', border: '1px solid black'}}>Test</div>\\\",\\n\",\n    \"    },\\n\",\n    \"    {\\n\",\n    \"        \\\"input\\\": '<span style=\\\"font-size: 1.2rem;\\\">Test</span>',\\n\",\n    \"        \\\"expected\\\": \\\"<span style={{fontSize: '1.2rem'}}>Test</span>\\\",\\n\",\n    \"    },\\n\",\n    \"    {\\n\",\n    \"        \\\"input\\\": \\\"\\\"\\\"<pre style=\\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\\"></pre>\\\"\\\"\\\",\\n\",\n    \"        \\\"expected\\\": \\\"\\\"\\\"<pre style={{whiteSpace: 'pre', overflowX: 'auto', lineHeight: 'normal', fontFamily: 'Menlo,\\\"DejaVu Sans Mono\\\",consolas,\\\"Courier New\\\",monospace'}}></pre>\\\"\\\"\\\",\\n\",\n    \"    },\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"for fixture in fixtures:\\n\",\n    \"    actual = _convert_html_style_attribute_to_jsx(fixture[\\\"input\\\"])\\n\",\n    \"    print(\\\"*\\\" * 120)\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == fixture[\\\"expected\\\"], fixture[\\\"expected\\\"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7c8fa05a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_all_markdown_files_path(docs_path: Path) -> List[Path]:\\n\",\n    \"    \\\"\\\"\\\"Get all Markdown files in a directory and its subdirectories.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"       
 directory: The path to the directory to search in.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of paths to all Markdown files found in the directory and its subdirectories.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    markdown_files = [file_path for file_path in docs_path.glob(\\\"**/*.md\\\")]\\n\",\n    \"    return markdown_files\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dc72d20f\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[Path('/tmp/tmpbyeo8sb_/docusaurus/docs/file.md'), Path('/tmp/tmpbyeo8sb_/docusaurus/docs/api/file.md'), Path('/tmp/tmpbyeo8sb_/docusaurus/docs/api/fastKafka/file.md'), Path('/tmp/tmpbyeo8sb_/docusaurus/docs/blog/file.md')]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    module_name = \\\"fastkafka\\\"\\n\",\n    \"\\n\",\n    \"    docs_path = Path(d) / \\\"docusaurus\\\" / \\\"docs\\\"\\n\",\n    \"    docs_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    api_path = docs_path / \\\"api\\\"\\n\",\n    \"    api_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    blog_path = docs_path / \\\"blog\\\"\\n\",\n    \"    blog_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    nested_api_path = api_path / \\\"fastKafka\\\"\\n\",\n    \"    nested_api_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    for p in [docs_path, api_path, blog_path, nested_api_path]:\\n\",\n    \"        with open((p / \\\"file.md\\\"), \\\"w\\\") as f:\\n\",\n    \"            f.write(\\\"sample text\\\")\\n\",\n    \"\\n\",\n    \"    actual = _get_all_markdown_files_path(docs_path)\\n\",\n    \"    expected = [\\n\",\n    \"        Path(docs_path) / \\\"file.md\\\",\\n\",\n    \"        Path(api_path) / \\\"file.md\\\",\\n\",\n    \"        Path(nested_api_path) / \\\"file.md\\\",\\n\",\n    \"        Path(blog_path) / 
\\\"file.md\\\",\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    print(actual)\\n\",\n    \"    assert sorted(actual) == sorted(expected), expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2bc75616\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _fix_special_symbols_in_html(contents: str) -> str:\\n\",\n    \"    contents = contents.replace(\\\"”\\\", '\\\"')\\n\",\n    \"    return contents\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dd675ce3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"<a href=\\\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\\\" target=\\\"_blank\\\">\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"<a href=\\\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\\\" target=”_blank”>\\\"\\\"\\\"\\n\",\n    \"expected = \\\"\\\"\\\"<a href=\\\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\\\" target=\\\"_blank\\\">\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"actual = _fix_special_symbols_in_html(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"68923a9c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _add_file_extension_to_link(url: str) -> str:\\n\",\n    \"    \\\"\\\"\\\"Add file extension to the last segment of a URL\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        url: A URL string.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A string of the updated URL with a file 
extension added to the last segment of the URL.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    segments = url.split(\\\"/#\\\")[0].split(\\\"/\\\")[-2:]\\n\",\n    \"    return url.replace(f\\\"/{segments[1]}\\\", f\\\"/{segments[1]}.md\\\").replace(\\\".md/#\\\", \\\".md#\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"00195964\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"https://airtai.github.io/fastkafka/api/fastkafka/FastKafka.md#fastkafka.FastKafka\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"https://airtai.github.io/fastkafka/api/fastkafka/FastKafka/#fastkafka.FastKafka\\\"\\n\",\n    \"expected = \\\"https://airtai.github.io/fastkafka/api/fastkafka/FastKafka.md#fastkafka.FastKafka\\\"\\n\",\n    \"\\n\",\n    \"actual = _add_file_extension_to_link(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4afbe422\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"https://airtai.github.io/fastkafka/api/fastkafka/testing/ApacheKafkaBroker.md#fastkafka.testing.ApacheKafkaBroker\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"https://airtai.github.io/fastkafka/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker\\\"\\n\",\n    \"expected = \\\"https://airtai.github.io/fastkafka/api/fastkafka/testing/ApacheKafkaBroker.md#fastkafka.testing.ApacheKafkaBroker\\\"\\n\",\n    \"\\n\",\n    \"actual = _add_file_extension_to_link(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c5540873\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": 
\"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"https://github.com/airtai/sample_fastkafka_with_redpanda.md\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"https://github.com/airtai/sample_fastkafka_with_redpanda\\\"\\n\",\n    \"expected = \\\"https://github.com/airtai/sample_fastkafka_with_redpanda.md\\\"\\n\",\n    \"\\n\",\n    \"actual = _add_file_extension_to_link(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b40ecb47\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _generate_production_url(url: str) -> str:\\n\",\n    \"    \\\"\\\"\\\"Generate a Docusaurus compatible production URL for the given symbol URL.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        url: The symbol URL to be converted.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The production URL of the symbol.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    url_segment, hash_segment = url.split(\\\".md\\\")\\n\",\n    \"    url_split = url_segment.split(\\\"/\\\")\\n\",\n    \"    if url_split[-1].lower() == url_split[-2].lower():\\n\",\n    \"        return \\\"/\\\".join(url_split[:-1]) + hash_segment\\n\",\n    \"    return url.replace(\\\".md\\\", \\\"\\\")\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5b3bb73a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka\\n\",\n      \"https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"url = \\\"https://airtai.github.io/fastkafka/docs/api/fastkafka/FastKafka.md#fastkafka.FastKafka\\\"\\n\",\n    
\"expected = \\\"https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka\\\"\\n\",\n    \"\\n\",\n    \"actual = _generate_production_url(url)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\\n\",\n    \"\\n\",\n    \"expected = \\\"https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester\\\"\\n\",\n    \"actual = _generate_production_url(\\\"https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester.md#fastkafka.testing.Tester\\\")\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"82d131ac\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _fix_symbol_links(\\n\",\n    \"    contents: str,\\n\",\n    \"    dir_prefix: str,\\n\",\n    \"    doc_host: str,\\n\",\n    \"    doc_baseurl: str,\\n\",\n    \"    use_relative_doc_links: bool = True,\\n\",\n    \") -> str:\\n\",\n    \"    \\\"\\\"\\\"Fix symbol links in Markdown content.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        contents: The Markdown content to search for symbol links.\\n\",\n    \"        dir_prefix: Directory prefix to append in the relative URL.\\n\",\n    \"        doc_host: The host URL for the documentation site.\\n\",\n    \"        doc_baseurl: The base URL for the documentation site.\\n\",\n    \"        use_relative_doc_links: If set to True, then the relative link to symbols will be added else,\\n\",\n    \"            production link will be added.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        str: The Markdown content with updated symbol links.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    prefix = re.escape(urljoin(doc_host + \\\"/\\\", doc_baseurl))\\n\",\n    \"    pattern = re.compile(rf\\\"\\\\[(.*?)\\\\]\\\\(({prefix}[^)]+)\\\\)\\\")\\n\",\n    \"    matches = 
pattern.findall(contents)\\n\",\n    \"    for match in matches:\\n\",\n    \"        old_url = match[1]\\n\",\n    \"        new_url = _add_file_extension_to_link(old_url).replace(\\\"/api/\\\", \\\"/docs/api/\\\")\\n\",\n    \"        if use_relative_doc_links:\\n\",\n    \"            dir_prefix = \\\"./\\\" if dir_prefix == \\\"\\\" else dir_prefix\\n\",\n    \"            updated_url = dir_prefix + new_url.split(\\\"/docs/\\\")[1]\\n\",\n    \"        else:\\n\",\n    \"            updated_url = _generate_production_url(\\n\",\n    \"                doc_host + doc_baseurl + \\\"/docs/\\\" + new_url.split(\\\"/docs/\\\")[1]\\n\",\n    \"            )\\n\",\n    \"        contents = contents.replace(old_url, updated_url)\\n\",\n    \"    return contents\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6d45779c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"In the above example,\\n\",\n      \"[`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\n\",\n      \"[`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\n\",\n      \"app is named as `kafka_app`\\n\",\n      \"[`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"In the above example,\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/0.5.0/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\n\",\n    \"app is named as `kafka_app`\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/0.5.0rc0/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"In the above example,\\n\",\n    
\"[`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\n\",\n    \"app is named as `kafka_app`\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"dir_prefix = \\\"../\\\"\\n\",\n    \"doc_host=\\\"https://airtai.github.io\\\"\\n\",\n    \"doc_baseurl=\\\"/fastkafka\\\"\\n\",\n    \"actual = _fix_symbol_links(fixture, dir_prefix, doc_host, doc_baseurl, use_relative_doc_links=False)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9bbfbe81\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"In the above example,\\n\",\n      \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      \"app is named as `kafka_app`\\n\",\n      \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"In the above example,\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/0.5.0/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\n\",\n    \"app is named as `kafka_app`\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/0.5.0rc0/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"In the above example,\\n\",\n    \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"app is named as `kafka_app`\\n\",\n    
\"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"dir_prefix = \\\"../\\\"\\n\",\n    \"doc_host=\\\"https://airtai.github.io\\\"\\n\",\n    \"doc_baseurl=\\\"/fastkafka\\\"\\n\",\n    \"actual = _fix_symbol_links(fixture, dir_prefix, doc_host, doc_baseurl)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"54241bdf\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"In the above example,\\n\",\n      \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      \"app is named as `kafka_app`\\n\",\n      \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"In the above example,\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/0.5.0/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\n\",\n    \"app is named as `kafka_app`\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/0.5.0rc0/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"In the above example,\\n\",\n    \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"app is named as `kafka_app`\\n\",\n    \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"dir_prefix = \\\"\\\"\\n\",\n    \"doc_host=\\\"https://airtai.github.io\\\"\\n\",\n    \"doc_baseurl=\\\"/fastkafka\\\"\\n\",\n    \"actual = _fix_symbol_links(fixture, dir_prefix, doc_host, 
doc_baseurl)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3a51204c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"The service can be tested using the\\n\",\n      \"[`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n      \"[`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n      \"[`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n      \"instance and we can start the Kafka\\n\",\n      \"broker locally using the\\n\",\n      \"[`ApacheKafkaBroker`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/ApacheKafkaBroker#fastkafka.testing.ApacheKafkaBroker).\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"The service can be tested using the\\n\",\n    \"[`Tester`](https://airtai.github.io/fastkafka/api/fastkafka/testing/Tester/#fastkafka.testing.Tester)\\n\",\n    \"[`Tester`](https://airtai.github.io/fastkafka/0.5.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester)\\n\",\n    \"[`Tester`](https://airtai.github.io/fastkafka/dev/api/fastkafka/testing/Tester/#fastkafka.testing.Tester)\\n\",\n    \"instance and we can start the Kafka\\n\",\n    \"broker locally using the\\n\",\n    \"[`ApacheKafkaBroker`](https://airtai.github.io/fastkafka/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker).\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"relative_url_False\\\": \\\"\\\"\\\"The service can be tested using the\\n\",\n    \"[`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n    
\"[`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n    \"[`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n    \"instance and we can start the Kafka\\n\",\n    \"broker locally using the\\n\",\n    \"[`ApacheKafkaBroker`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/ApacheKafkaBroker#fastkafka.testing.ApacheKafkaBroker).\\\"\\\"\\\",\\n\",\n    \"    \\\"relative_url_True\\\": \\\"\\\"\\\"The service can be tested using the\\n\",\n    \"[`Tester`](./api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n    \"[`Tester`](./api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n    \"[`Tester`](./api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n    \"instance and we can start the Kafka\\n\",\n    \"broker locally using the\\n\",\n    \"[`ApacheKafkaBroker`](./api/fastkafka/testing/ApacheKafkaBroker.md#fastkafka.testing.ApacheKafkaBroker).\\\"\\\"\\\",\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"dir_prefix = \\\"\\\"\\n\",\n    \"doc_host = \\\"https://airtai.github.io\\\"\\n\",\n    \"doc_baseurl = \\\"/fastkafka\\\"\\n\",\n    \"\\n\",\n    \"for flag in [False]:\\n\",\n    \"    actual = _fix_symbol_links(fixture, dir_prefix, doc_host, doc_baseurl, flag)\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == expected[f\\\"relative_url_{flag}\\\"], expected[f\\\"relative_url_{flag}\\\"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"78e866b0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"This is not a link to a symbol: https://www.google.com\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"This is not a link to a symbol: https://www.google.com\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"This is not a 
link to a symbol: https://www.google.com\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"dir_prefix = \\\"\\\"\\n\",\n    \"doc_host=\\\"https://airtai.github.io\\\"\\n\",\n    \"doc_baseurl=\\\"/fastkafka\\\"\\n\",\n    \"actual = _fix_symbol_links(fixture, dir_prefix, doc_host, doc_baseurl)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d33bf47d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"A sample fastkafka-based library that uses Redpanda for testing, based\\n\",\n      \"on this guide, can be found\\n\",\n      \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"A sample fastkafka-based library that uses Redpanda for testing, based\\n\",\n    \"on this guide, can be found\\n\",\n    \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"A sample fastkafka-based library that uses Redpanda for testing, based\\n\",\n    \"on this guide, can be found\\n\",\n    \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"dir_prefix = \\\"\\\"\\n\",\n    \"doc_host=\\\"https://airtai.github.io\\\"\\n\",\n    \"doc_baseurl=\\\"/fastkafka\\\"\\n\",\n    \"actual = _fix_symbol_links(fixture, dir_prefix, doc_host, doc_baseurl)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"617b2f5d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"To learn more about Redpanda, please visit their\\n\",\n      \"[website](https://redpanda.com/) or checkout this [blog\\n\",\n      
\"post](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\\n\",\n      \"comparing Redpanda and Kafka’s performance benchmarks.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"To learn more about Redpanda, please visit their\\n\",\n    \"[website](https://redpanda.com/) or checkout this [blog\\n\",\n    \"post](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\\n\",\n    \"comparing Redpanda and Kafka’s performance benchmarks.\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"To learn more about Redpanda, please visit their\\n\",\n    \"[website](https://redpanda.com/) or checkout this [blog\\n\",\n    \"post](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark)\\n\",\n    \"comparing Redpanda and Kafka’s performance benchmarks.\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"dir_prefix = \\\"\\\"\\n\",\n    \"doc_host=\\\"https://airtai.github.io\\\"\\n\",\n    \"doc_baseurl=\\\"/fastkafka\\\"\\n\",\n    \"actual = _fix_symbol_links(fixture, dir_prefix, doc_host, doc_baseurl)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"06c76c04\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _get_relative_url_prefix(docs_path: Path, sub_path: Path) -> str:\\n\",\n    \"    \\\"\\\"\\\"Returns a relative url prefix from a sub path to a docs path.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        docs_path (Path): The docs directory path.\\n\",\n    \"        sub_path (Path): The sub directory path.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        str: A string representing the relative path from the sub path to the docs path.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If the sub path is not a descendant of the docs path.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n   
 \"        relative_path = sub_path.relative_to(docs_path)\\n\",\n    \"    except ValueError:\\n\",\n    \"        raise ValueError(f\\\"{sub_path} is not a descendant of {docs_path}\\\")\\n\",\n    \"\\n\",\n    \"    return (\\n\",\n    \"        \\\"../\\\" * (len(relative_path.parts) - 1) if len(relative_path.parts) > 1 else \\\"\\\"\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0d0a4524\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\n\",\n      \"../\\n\",\n      \"../../\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"docs_path = Path('docusaurus/docs')\\n\",\n    \"\\n\",\n    \"sub_path = Path('docusaurus/docs/index.md')\\n\",\n    \"actual = _get_relative_url_prefix(docs_path, sub_path) \\n\",\n    \"print(actual)\\n\",\n    \"assert actual == \\\"\\\"\\n\",\n    \"\\n\",\n    \"sub_path = Path('docusaurus/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka.md')\\n\",\n    \"actual = _get_relative_url_prefix(docs_path, sub_path)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == \\\"../\\\"\\n\",\n    \"\\n\",\n    \"sub_path = Path('docusaurus/docs/guides/tutorial/fastkafka.md')\\n\",\n    \"actual = _get_relative_url_prefix(docs_path, sub_path)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == \\\"../../\\\"\\n\",\n    \"\\n\",\n    \"with pytest.raises(ValueError) as e:\\n\",\n    \"    sub_path = Path('mkdocs/docs/guides/tutorial/fastkafka.md')\\n\",\n    \"    _get_relative_url_prefix(docs_path, sub_path)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"769c8693\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def fix_invalid_syntax_in_markdown(docs_path: str) -> None:\\n\",\n    \"    \\\"\\\"\\\"Fix invalid HTML syntax in markdown files and converts 
inline style attributes to JSX-compatible format.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        docs_path: The path to the root directory to search for markdown files.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    cfg = get_config()\\n\",\n    \"    doc_host = cfg[\\\"doc_host\\\"]\\n\",\n    \"    doc_baseurl = cfg[\\\"doc_baseurl\\\"]\\n\",\n    \"\\n\",\n    \"    markdown_files = _get_all_markdown_files_path(Path(docs_path))\\n\",\n    \"    for file in markdown_files:\\n\",\n    \"        relative_url_prefix = _get_relative_url_prefix(Path(docs_path), file)\\n\",\n    \"        contents = Path(file).read_text()\\n\",\n    \"\\n\",\n    \"        contents = _convert_html_style_attribute_to_jsx(contents)\\n\",\n    \"        contents = _fix_special_symbols_in_html(contents)\\n\",\n    \"        contents = _fix_symbol_links(\\n\",\n    \"            contents, relative_url_prefix, doc_host, doc_baseurl\\n\",\n    \"        )\\n\",\n    \"        file.write_text(contents)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ccf9e23b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"************************************************************************************************************************\\n\",\n      \"source some text goes here Test and one more tag Test\\n\",\n      \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      \"[`Tester`](./api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n      \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n      \"\\n\",\n      \"************************************************************************************************************************\\n\",\n      \"source some text goes here Test and one more tag Test\\n\",\n      \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      
\"[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n      \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n      \"\\n\",\n      \"************************************************************************************************************************\\n\",\n      \"source some text goes here Test and one more tag Test\\n\",\n      \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      \"[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n      \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n      \"\\n\",\n      \"************************************************************************************************************************\\n\",\n      \"source some text goes here Test and one more tag Test\\n\",\n      \"[`FastKafka`](../../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n      \"[`Tester`](../../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n      \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    module_name = \\\"fastkafka\\\"\\n\",\n    \"\\n\",\n    \"    docs_path = Path(d) / \\\"docusaurus\\\" / \\\"docs\\\"\\n\",\n    \"    docs_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    api_path = docs_path / \\\"api\\\"\\n\",\n    \"    api_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    blog_path = docs_path / \\\"blog\\\"\\n\",\n    \"    blog_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    nested_api_path = api_path / \\\"fastKafka\\\"\\n\",\n    \"    nested_api_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    for p in [docs_path, api_path, blog_path, nested_api_path]:\\n\",\n    \"        with open((p / \\\"file.md\\\"), \\\"w\\\") as f:\\n\",\n    \"            f.write(\\n\",\n    \"                \\\"\\\"\\\"source some text goes 
here Test and one more tag Test\\n\",\n    \"[`FastKafka`](https://airtai.github.io/fastkafka/api/fastkafka/FastKafka/#fastkafka.FastKafka)\\n\",\n    \"[`Tester`](https://airtai.github.io/fastkafka/api/fastkafka/testing/Tester/#fastkafka.testing.Tester)\\n\",\n    \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"            )\\n\",\n    \"\\n\",\n    \"    fix_invalid_syntax_in_markdown(str(docs_path))\\n\",\n    \"    expected = [\\n\",\n    \"        \\\"\\\"\\\"source some text goes here Test and one more tag Test\\n\",\n    \"[`FastKafka`](./api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"[`Tester`](./api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n    \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n    \"\\\"\\\"\\\",\\n\",\n    \"        \\\"\\\"\\\"source some text goes here Test and one more tag Test\\n\",\n    \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n    \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n    \"\\\"\\\"\\\",\\n\",\n    \"        \\\"\\\"\\\"source some text goes here Test and one more tag Test\\n\",\n    \"[`FastKafka`](../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"[`Tester`](../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n    \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n    \"\\\"\\\"\\\",\\n\",\n    \"        \\\"\\\"\\\"source some text goes here Test and one more tag Test\\n\",\n    \"[`FastKafka`](../../api/fastkafka/FastKafka.md#fastkafka.FastKafka)\\n\",\n    \"[`Tester`](../../api/fastkafka/testing/Tester.md#fastkafka.testing.Tester)\\n\",\n    \"[here](https://github.com/airtai/sample_fastkafka_with_redpanda)\\n\",\n    \"\\\"\\\"\\\",\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    for i, p in 
enumerate([docs_path, api_path, blog_path, nested_api_path]):\\n\",\n    \"        with open((p / \\\"file.md\\\"), \\\"r\\\") as f:\\n\",\n    \"            actual = f.read()\\n\",\n    \"            print(\\\"*\\\" * 120)\\n\",\n    \"            print(actual)\\n\",\n    \"            assert actual == expected[i]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ef4c7fa5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def generate_markdown_docs(module_name: str, docs_path: str) -> None:\\n\",\n    \"    \\\"\\\"\\\"Generates Markdown documentation files for the symbols in the given module and save them to the given directory.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        module_name: The name of the module to generate documentation for.\\n\",\n    \"        docs_path: The path to the directory where the documentation files will be saved.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    members_with_submodules = _get_submodules(module_name)\\n\",\n    \"    symbols = _load_submodules(module_name, members_with_submodules)\\n\",\n    \"    lib_version = get_config()[\\\"version\\\"]\\n\",\n    \"    \\n\",\n    \"    for symbol in symbols:\\n\",\n    \"        content = _get_formatted_docstring_for_symbol(symbol, lib_version)\\n\",\n    \"        target_file_path = (\\n\",\n    \"            \\\"/\\\".join(f\\\"{symbol.__module__}.{symbol.__name__}\\\".split(\\\".\\\")) + \\\".md\\\"\\n\",\n    \"        )\\n\",\n    \"        with open((Path(docs_path) / \\\"api\\\" / target_file_path), \\\"w\\\") as f:\\n\",\n    \"            f.write(content)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d95dbe68\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"0.1.0\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    
\"@contextmanager\\n\",\n    \"def mock_get_config(lib_version):\\n\",\n    \"    with patch('__main__.get_config') as mock_get_config:\\n\",\n    \"        mock_get_config.return_value = {\\\"version\\\": lib_version}\\n\",\n    \"        yield\\n\",\n    \"        \\n\",\n    \"with mock_get_config(lib_version=\\\"0.1.0\\\"):\\n\",\n    \"    actual = get_config()[\\\"version\\\"]\\n\",\n    \"    print(actual)\\n\",\n    \"    expected = \\\"0.1.0\\\"\\n\",\n    \"    assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"39985ae4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"## mypackage_904.mymodule.FixtureClass {#mypackage_904.mymodule.FixtureClass}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"Fixture documentation\\n\",\n      \"\\n\",\n      \"### __init__ {#mypackage_904.mymodule.FixtureClass.init}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"__init__(\\n\",\n      \"    self, attribute\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"__init__ documentation url=HttpUrl(\\\" https://www.google.co.uk \\\", )\\n\",\n      \"\\n\",\n      \"### abstract_method {#mypackage_904.mymodule.FixtureClass.abstract_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      
\"@abstractmethod\\n\",\n      \"abstract_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"abstract_method documentation\\n\",\n      \"\\n\",\n      \"### class_method {#mypackage_904.mymodule.FixtureClass.class_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@classmethod\\n\",\n      \"class_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"class_method documentation\\n\",\n      \"\\n\",\n      \"### instance_method {#mypackage_904.mymodule.FixtureClass.instance_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"instance_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"instance_method documentation\\n\",\n      \"\\n\",\n      \"### patched_method_in_same_file {#mypackage_904.mymodule.FixtureClass.patched_method_in_same_file}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"patched_method_in_same_file(\\n\",\n      \"    self, s\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"I am a patched method in the same file\\n\",\n      \"\\n\",\n      \"### property_attribute {#mypackage_904.mymodule.FixtureClass.property_attribute}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View 
source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@property\\n\",\n      \"property_attribute(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"property_attribute documentation\\n\",\n      \"\\n\",\n      \"### static_method {#mypackage_904.mymodule.FixtureClass.static_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/0.1.0/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@staticmethod\\n\",\n      \"static_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"static_method documentation\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"## mypackage_416.mymodule.FixtureClass {#mypackage_416.mymodule.FixtureClass}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"Fixture documentation\\n\",\n      \"\\n\",\n      \"### __init__ {#mypackage_416.mymodule.FixtureClass.init}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"__init__(\\n\",\n      \"    self, attribute\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"__init__ documentation url=HttpUrl(\\\" https://www.google.co.uk \\\", )\\n\",\n      \"\\n\",\n      \"### abstract_method {#mypackage_416.mymodule.FixtureClass.abstract_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      
\"@abstractmethod\\n\",\n      \"abstract_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"abstract_method documentation\\n\",\n      \"\\n\",\n      \"### class_method {#mypackage_416.mymodule.FixtureClass.class_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@classmethod\\n\",\n      \"class_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"class_method documentation\\n\",\n      \"\\n\",\n      \"### instance_method {#mypackage_416.mymodule.FixtureClass.instance_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"instance_method(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"instance_method documentation\\n\",\n      \"\\n\",\n      \"### patched_method_in_same_file {#mypackage_416.mymodule.FixtureClass.patched_method_in_same_file}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"patched_method_in_same_file(\\n\",\n      \"    self, s\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"I am a patched method in the same file\\n\",\n      \"\\n\",\n      \"### property_attribute {#mypackage_416.mymodule.FixtureClass.property_attribute}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View 
source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@property\\n\",\n      \"property_attribute(\\n\",\n      \"    self\\n\",\n      \")\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"property_attribute documentation\\n\",\n      \"\\n\",\n      \"### static_method {#mypackage_416.mymodule.FixtureClass.static_method}\\n\",\n      \"\\n\",\n      \"<a href=\\\"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\n\",\n      \"\\n\",\n      \"```py\\n\",\n      \"@staticmethod\\n\",\n      \"static_method()\\n\",\n      \"```\\n\",\n      \"\\n\",\n      \"static_method documentation\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"module_code = '''\\n\",\n    \"\\n\",\n    \"__all__ = ['FixtureClass']\\n\",\n    \"\\n\",\n    \"from typing import *\\n\",\n    \"from abc import abstractmethod\\n\",\n    \"\\n\",\n    \"from fastcore.basics import patch\\n\",\n    \"\\n\",\n    \"class FixtureClass:\\n\",\n    \"    \\\"\\\"\\\"Fixture documentation\\\"\\\"\\\"\\n\",\n    \"    \\n\",\n    \"    def __init__(self, attribute):\\n\",\n    \"        \\\"\\\"\\\"__init__ documentation url=HttpUrl(\\\"https://www.google.co.uk\\\", )\\\"\\\"\\\"\\n\",\n    \"        self.attribute = attribute\\n\",\n    \"        \\n\",\n    \"    def __str__(self):\\n\",\n    \"        \\\"\\\"\\\"__str__ documentation\\\"\\\"\\\"\\n\",\n    \"        return f\\\"MyClass instance with attribute: {self.attribute}\\\"\\n\",\n    \"    \\n\",\n    \"    @property\\n\",\n    \"    def property_attribute(self):\\n\",\n    \"        \\\"\\\"\\\"property_attribute documentation\\\"\\\"\\\"\\n\",\n    \"        return self.attribute\\n\",\n    \"    \\n\",\n    \"    @classmethod\\n\",\n    \"    def class_method(cls):\\n\",\n    \"        \\\"\\\"\\\"class_method documentation\\\"\\\"\\\"\\n\",\n    \"        return cls.class_variable\\n\",\n   
 \"    \\n\",\n    \"    @staticmethod\\n\",\n    \"    def static_method():\\n\",\n    \"        \\\"\\\"\\\"static_method documentation\\\"\\\"\\\"\\n\",\n    \"        return \\\"This is a static method\\\"\\n\",\n    \"    \\n\",\n    \"    def instance_method(self):\\n\",\n    \"        \\\"\\\"\\\"instance_method documentation\\\"\\\"\\\"\\n\",\n    \"        return \\\"This is an instance method\\\"\\n\",\n    \"    \\n\",\n    \"    @abstractmethod\\n\",\n    \"    def abstract_method(self):\\n\",\n    \"        \\\"\\\"\\\"abstract_method documentation\\\"\\\"\\\"\\n\",\n    \"        pass\\n\",\n    \"        \\n\",\n    \"@patch\\n\",\n    \"def patched_method_in_same_file(self:FixtureClass, s: str) -> None: \\n\",\n    \"    \\\"\\\"\\\"I am a patched method in the same file\\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"'''\\n\",\n    \"\\n\",\n    \"for lib_version in [\\\"0.1.0\\\", \\\"0.1.1rc0\\\"]:\\n\",\n    \"    with TemporaryDirectory() as d:\\n\",\n    \"        my_package = f\\\"mypackage_{random.randint(0, 1000)}\\\"\\n\",\n    \"        module_name = \\\"mymodule\\\"\\n\",\n    \"\\n\",\n    \"        docs_path = Path(d) / \\\"docusaurus\\\" / \\\"docs\\\"\\n\",\n    \"        docs_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"        api_path = docs_path / \\\"api\\\" / my_package / module_name\\n\",\n    \"        api_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"        my_package_path = Path(d) / my_package\\n\",\n    \"        my_package_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"        file_path = my_package_path / f\\\"{module_name}.py\\\"\\n\",\n    \"\\n\",\n    \"        with open(file_path, \\\"w\\\", encoding=\\\"utf-8\\\") as file:\\n\",\n    \"            file.write(module_code)\\n\",\n    \"\\n\",\n    \"        with open((my_package_path / \\\"__init__.py\\\"), \\\"w\\\") as f:\\n\",\n    \"            f.write(f'__version__ = \\\"{lib_version}\\\"')\\n\",\n    \"\\n\",\n    \"        with 
add_tmp_path_to_sys_path(d):\\n\",\n    \"            with mock_get_config(lib_version=lib_version):\\n\",\n    \"                with mock_custom_nbdev_lookup():\\n\",\n    \"                    members_with_submodules = _get_submodules(my_package)\\n\",\n    \"                    symbols = _load_submodules(my_package, members_with_submodules)\\n\",\n    \"                    generate_markdown_docs(my_package, str(docs_path))\\n\",\n    \"\\n\",\n    \"        with open(api_path / \\\"FixtureClass.md\\\", \\\"r\\\", encoding=\\\"utf-8\\\") as file:\\n\",\n    \"            actual = file.read()\\n\",\n    \"    \\n\",\n    \"    gh_tag = lib_version if lib_version.replace(\\\".\\\", \\\"\\\").isdigit() else \\\"main\\\"\\n\",\n    \"    expected = f\\\"## {my_package}\\\" + \\\".mymodule.FixtureClass {#\\\" + my_package + '.mymodule.FixtureClass}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n\\\\nFixture documentation\\\\n\\\\n### __init__ {#' + my_package + '.mymodule.FixtureClass.init}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n__init__(\\\\n    self, attribute\\\\n)\\\\n```\\\\n\\\\n__init__ documentation url=HttpUrl(\\\" https://www.google.co.uk \\\", )\\\\n\\\\n### abstract_method {#' + my_package + '.mymodule.FixtureClass.abstract_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@abstractmethod\\\\nabstract_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\nabstract_method documentation\\\\n\\\\n### class_method {#' + my_package + '.mymodule.FixtureClass.class_method}\\\\n\\\\n<a 
href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@classmethod\\\\nclass_method()\\\\n```\\\\n\\\\nclass_method documentation\\\\n\\\\n### instance_method {#' + my_package + '.mymodule.FixtureClass.instance_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\ninstance_method(\\\\n    self\\\\n)\\\\n```\\\\n\\\\ninstance_method documentation\\\\n\\\\n### patched_method_in_same_file {#' + my_package + '.mymodule.FixtureClass.patched_method_in_same_file}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\npatched_method_in_same_file(\\\\n    self, s\\\\n)\\\\n```\\\\n\\\\nI am a patched method in the same file\\\\n\\\\n### property_attribute {#' + my_package + '.mymodule.FixtureClass.property_attribute}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@property\\\\nproperty_attribute(\\\\n    self\\\\n)\\\\n```\\\\n\\\\nproperty_attribute documentation\\\\n\\\\n### static_method {#' + my_package + '.mymodule.FixtureClass.static_method}\\\\n\\\\n<a href=\\\"https://github.com/airtai/fastkafka/blob/' + gh_tag + '/fastkafka/_application/app.py#L171-L425\\\" class=\\\"link-to-source\\\" target=\\\"_blank\\\">View source</a>\\\\n\\\\n```py\\\\n@staticmethod\\\\nstatic_method()\\\\n```\\\\n\\\\nstatic_method documentation\\\\n\\\\n'\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"a7b556ef\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _parse_lines(lines: List[str]) -> Tuple[List[str], int]:\\n\",\n    \"    \\\"\\\"\\\"Parse a list of lines and return a tuple containing a list of filenames and an index indicating how many lines to skip.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        lines: A list of strings representing lines of input text.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A tuple containing a list of strings representing the filenames extracted\\n\",\n    \"        from links in the lines and an integer representing the number of lines to skip.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    index = next(\\n\",\n    \"        (i for i, line in enumerate(lines) if not line.strip().startswith(\\\"- [\\\")),\\n\",\n    \"        len(lines),\\n\",\n    \"    )\\n\",\n    \"    return [line.split(\\\"(\\\")[1][:-4] for line in lines[:index]], index\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bc6fdfe9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"(['api/fastkafka/encoder/json_encoder'], 1)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = ['            - [json_encoder](api/fastkafka/encoder/json_encoder.md)', '        - testing', '            - [ApacheKafkaBroker](api/fastkafka/testing/ApacheKafkaBroker.md)', '            - [LocalRedpandaBroker](api/fastkafka/testing/LocalRedpandaBroker.md)', '            - [Tester](api/fastkafka/testing/Tester.md)']\\n\",\n    \"expected = (['api/fastkafka/encoder/json_encoder'], 1)\\n\",\n    \"\\n\",\n    \"actual = _parse_lines(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"b816ec5c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"(['api/fastkafka/testing/ApacheKafkaBroker', 'api/fastkafka/testing/LocalRedpandaBroker', 'api/fastkafka/testing/Tester'], 3)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"\\n\",\n    \"fixture = ['            - [ApacheKafkaBroker](api/fastkafka/testing/ApacheKafkaBroker.md)', '            - [LocalRedpandaBroker](api/fastkafka/testing/LocalRedpandaBroker.md)', '            - [Tester](api/fastkafka/testing/Tester.md)']\\n\",\n    \"expected = (['api/fastkafka/testing/ApacheKafkaBroker', 'api/fastkafka/testing/LocalRedpandaBroker', 'api/fastkafka/testing/Tester'], 3)\\n\",\n    \"\\n\",\n    \"actual = _parse_lines(fixture)\\n\",\n    \"print(actual)\\n\",\n    \"\\n\",\n    \"assert actual == expected, actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d5be29aa\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _parse_section(text: str, ignore_first_line: bool = False) -> List[Any]:\\n\",\n    \"    \\\"\\\"\\\"Parse the given section contents and return a list of file names in the expected format.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        text: A string representing the contents of a file.\\n\",\n    \"        ignore_first_line: Flag indicating whether to ignore the first line extracting the section contents.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of filenames in the expected format\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pattern = r\\\"\\\\[.*?\\\\]\\\\((.*?)\\\\)|\\\\[(.*?)\\\\]\\\\[(.*?)\\\\]\\\"\\n\",\n    \"    lines = text.split(\\\"\\\\n\\\")[1:] if ignore_first_line else text.split(\\\"\\\\n\\\")\\n\",\n    \"    ret_val = []\\n\",\n    \"    index = 0\\n\",\n    \"    while index < len(lines):\\n\",\n    \"        line = lines[index]\\n\",\n 
   \"        match = re.search(pattern, line.strip())\\n\",\n    \"        if match is not None:\\n\",\n    \"            ret_val.append(match.group(1).split(\\\".md\\\")[0])\\n\",\n    \"            index += 1\\n\",\n    \"        elif line.strip() != \\\"\\\":\\n\",\n    \"            value, skip_lines = _parse_lines(lines[index + 1 :])\\n\",\n    \"            ret_val.append({line.replace(\\\"-\\\", \\\"\\\").strip(): value})\\n\",\n    \"            index += skip_lines + 1\\n\",\n    \"        else:\\n\",\n    \"            index += 1\\n\",\n    \"    return ret_val\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"87a06535\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"['api/fastkafka/FastKafka',\\n\",\n       \" 'api/fastkafka/KafkaEvent',\\n\",\n       \" {'encoder': ['api/fastkafka/encoder/json_encoder']},\\n\",\n       \" {'testing': ['api/fastkafka/testing/ApacheKafkaBroker',\\n\",\n       \"   'api/fastkafka/testing/LocalRedpandaBroker',\\n\",\n       \"   'api/fastkafka/testing/Tester']}]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"    - fastkafka\\n\",\n    \"        - [FastKafka](api/fastkafka/FastKafka.md)\\n\",\n    \"        - [KafkaEvent](api/fastkafka/KafkaEvent.md)\\n\",\n    \"        - encoder\\n\",\n    \"            - [json_encoder](api/fastkafka/encoder/json_encoder.md)\\n\",\n    \"        - testing\\n\",\n    \"            - [ApacheKafkaBroker](api/fastkafka/testing/ApacheKafkaBroker.md)\\n\",\n    \"            - [LocalRedpandaBroker](api/fastkafka/testing/LocalRedpandaBroker.md)\\n\",\n    \"            - [Tester](api/fastkafka/testing/Tester.md)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = [\\n\",\n    \"    \\\"api/fastkafka/FastKafka\\\",\\n\",\n    \"    \\\"api/fastkafka/KafkaEvent\\\",\\n\",\n    \"    
{\\\"encoder\\\": [\\\"api/fastkafka/encoder/json_encoder\\\"]},\\n\",\n    \"    {\\n\",\n    \"        \\\"testing\\\": [\\n\",\n    \"            \\\"api/fastkafka/testing/ApacheKafkaBroker\\\",\\n\",\n    \"            \\\"api/fastkafka/testing/LocalRedpandaBroker\\\",\\n\",\n    \"            \\\"api/fastkafka/testing/Tester\\\",\\n\",\n    \"        ]\\n\",\n    \"    }\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"ignore_first_line = True\\n\",\n    \"actual = _parse_section(fixture, ignore_first_line)\\n\",\n    \"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"399d66b1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"[{'Writing services': ['guides/Guide_11_Consumes_Basics',\\n\",\n       \"   'guides/Guide_21_Produces_Basics']},\\n\",\n       \" {'Testing': ['guides/Guide_31_Using_redpanda_to_test_fastkafka']}]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"    - Writing services\\n\",\n    \"        - [@consumes basics](guides/Guide_11_Consumes_Basics.md)\\n\",\n    \"        - [@consumes basics](guides/Guide_21_Produces_Basics.md)\\n\",\n    \"    - Testing\\n\",\n    \"        - [Using Redpanda to test FastKafka](guides/Guide_31_Using_redpanda_to_test_fastkafka.md)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = [\\n\",\n    \"    {\\n\",\n    \"        \\\"Writing services\\\": [\\n\",\n    \"            \\\"guides/Guide_11_Consumes_Basics\\\",\\n\",\n    \"            \\\"guides/Guide_21_Produces_Basics\\\",\\n\",\n    \"        ],\\n\",\n    \"    },\\n\",\n    \"    {\\n\",\n    \"        \\\"Testing\\\": [\\\"guides/Guide_31_Using_redpanda_to_test_fastkafka\\\"],\\n\",\n    \"    },\\n\",\n    \"]\\n\",\n    \"\\n\",\n    \"actual = _parse_section(fixture)\\n\",\n    
\"display(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9212e84e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _get_section_from_markdown(\\n\",\n    \"    markdown_text: str, section_header: str\\n\",\n    \") -> Optional[str]:\\n\",\n    \"    \\\"\\\"\\\"Get the contents of the section header from the given markdown text\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        markdown_text: A string containing the markdown text to extract the section from.\\n\",\n    \"        section_header: A string representing the header of the section to extract.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A string representing the contents of the section header if the section header\\n\",\n    \"        is present in the markdown text, else None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pattern = re.compile(rf\\\"^- {section_header}\\\\n((?:\\\\s+- .*\\\\n)+)\\\", re.M)\\n\",\n    \"    match = pattern.search(markdown_text)\\n\",\n    \"    return match.group(1) if match else None\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ea297651\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"    - fastkafka\\n\",\n      \"        - [FastKafka](api/fastkafka/FastKafka.md)\\n\",\n      \"        - [KafkaEvent](api/fastkafka/KafkaEvent.md)\\n\",\n      \"        - encoder\\n\",\n      \"            - [json_encoder](api/fastkafka/encoder/json_encoder.md)\\n\",\n      \"        - testing\\n\",\n      \"            - [ApacheKafkaBroker](api/fastkafka/testing/ApacheKafkaBroker.md)\\n\",\n      \"            - [LocalRedpandaBroker](api/fastkafka/testing/LocalRedpandaBroker.md)\\n\",\n      \"            - [Tester](api/fastkafka/testing/Tester.md)\\n\",\n      \"\\n\"\n    
 ]\n    }\n   ],\n   \"source\": [\n    \"summary = \\\"\\\"\\\"- [FastKafka](index.md)\\n\",\n    \"- Guides\\n\",\n    \"    - Writing services\\n\",\n    \"        - [@consumes basics](guides/Guide_11_Consumes_Basics.md)\\n\",\n    \"        - [@consumes basics](guides/Guide_11_Consumes_Basics.md)\\n\",\n    \"    - Testing\\n\",\n    \"        - [Using Redpanda to test FastKafka](guides/Guide_31_Using_redpanda_to_test_fastkafka.md)\\n\",\n    \"- API\\n\",\n    \"    - fastkafka\\n\",\n    \"        - [FastKafka](api/fastkafka/FastKafka.md)\\n\",\n    \"        - [KafkaEvent](api/fastkafka/KafkaEvent.md)\\n\",\n    \"        - encoder\\n\",\n    \"            - [json_encoder](api/fastkafka/encoder/json_encoder.md)\\n\",\n    \"        - testing\\n\",\n    \"            - [ApacheKafkaBroker](api/fastkafka/testing/ApacheKafkaBroker.md)\\n\",\n    \"            - [LocalRedpandaBroker](api/fastkafka/testing/LocalRedpandaBroker.md)\\n\",\n    \"            - [Tester](api/fastkafka/testing/Tester.md)\\n\",\n    \"- CLI\\n\",\n    \"    - [fastkafka](cli/fastkafka.md)\\n\",\n    \"    - [run_fastkafka_server_process](cli/run_fastkafka_server_process.md)\\n\",\n    \"- [Releases](CHANGELOG.md)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"section_header = \\\"API\\\"\\n\",\n    \"expected = \\\"\\\"\\\"    - fastkafka\\n\",\n    \"        - [FastKafka](api/fastkafka/FastKafka.md)\\n\",\n    \"        - [KafkaEvent](api/fastkafka/KafkaEvent.md)\\n\",\n    \"        - encoder\\n\",\n    \"            - [json_encoder](api/fastkafka/encoder/json_encoder.md)\\n\",\n    \"        - testing\\n\",\n    \"            - [ApacheKafkaBroker](api/fastkafka/testing/ApacheKafkaBroker.md)\\n\",\n    \"            - [LocalRedpandaBroker](api/fastkafka/testing/LocalRedpandaBroker.md)\\n\",\n    \"            - [Tester](api/fastkafka/testing/Tester.md)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"actual = _get_section_from_markdown(summary, section_header)\\n\",\n    \"print(actual)\\n\",\n    
\"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fa11acce\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"    - [fastkafka](cli/fastkafka.md)\\n\",\n      \"    - [run_fastkafka_server_process](cli/run_fastkafka_server_process.md)\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"section_header = \\\"CLI\\\"\\n\",\n    \"expected = \\\"\\\"\\\"    - [fastkafka](cli/fastkafka.md)\\n\",\n    \"    - [run_fastkafka_server_process](cli/run_fastkafka_server_process.md)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"actual = _get_section_from_markdown(summary, section_header)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2b41c549\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"    - Writing services\\n\",\n      \"        - [@consumes basics](guides/Guide_11_Consumes_Basics.md)\\n\",\n      \"        - [@consumes basics](guides/Guide_11_Consumes_Basics.md)\\n\",\n      \"    - Testing\\n\",\n      \"        - [Using Redpanda to test FastKafka](guides/Guide_31_Using_redpanda_to_test_fastkafka.md)\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"section_header = \\\"Guides\\\"\\n\",\n    \"expected = \\\"\\\"\\\"    - Writing services\\n\",\n    \"        - [@consumes basics](guides/Guide_11_Consumes_Basics.md)\\n\",\n    \"        - [@consumes basics](guides/Guide_11_Consumes_Basics.md)\\n\",\n    \"    - Testing\\n\",\n    \"        - [Using Redpanda to test FastKafka](guides/Guide_31_Using_redpanda_to_test_fastkafka.md)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"actual = _get_section_from_markdown(summary, section_header)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == 
expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"43f07f1a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"None\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"section_header = \\\"Invalid Section\\\"\\n\",\n    \"expected = None\\n\",\n    \"actual = _get_section_from_markdown(summary, section_header)\\n\",\n    \"print(actual)\\n\",\n    \"assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0c44fee7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def generate_sidebar(\\n\",\n    \"    summary_file: str = \\\"./docusaurus/docs/SUMMARY.md\\\",\\n\",\n    \"    summary: str = \\\"\\\",\\n\",\n    \"    target: str = \\\"./docusaurus/sidebars.js\\\",\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Generate a sidebar js file for a Docusaurus documentation site based on a SUMMARY.md file.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        summary_file: The path to the SUMMARY.md file containing the documentation structure.\\n\",\n    \"            Default is \\\"./docusaurus/docs/SUMMARY.md\\\".\\n\",\n    \"        summary: An optional summary string.\\n\",\n    \"            Default is an empty string.\\n\",\n    \"        target: The path to the target sidebar js file to be generated.\\n\",\n    \"            Default is \\\"./docusaurus/sidebars.js\\\".\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None: The function does not return any value directly, but it generates a sidebar file.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        FileNotFoundError: If the specified `summary_file` does not exist.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    with open(summary_file, \\\"r\\\") as stream, open(target, \\\"w\\\") as 
target_stream:\\n\",\n    \"        summary_contents = stream.read()\\n\",\n    \"\\n\",\n    \"        guides_summary = _get_section_from_markdown(summary_contents, \\\"Guides\\\")\\n\",\n    \"        parsed_guides = _parse_section(guides_summary)  # type: ignore\\n\",\n    \"\\n\",\n    \"        api_summary = _get_section_from_markdown(summary_contents, \\\"API\\\")\\n\",\n    \"        parsed_api = _parse_section(api_summary, True)  # type: ignore\\n\",\n    \"\\n\",\n    \"        cli_summary = _get_section_from_markdown(summary_contents, \\\"CLI\\\")\\n\",\n    \"        parsed_cli = _parse_section(cli_summary)  # type: ignore\\n\",\n    \"\\n\",\n    \"        target_stream.write(\\n\",\n    \"            \\\"\\\"\\\"module.exports = {\\n\",\n    \"tutorialSidebar: [\\n\",\n    \"    'index', {'Guides': \\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"            + str(parsed_guides)\\n\",\n    \"            + \\\"},\\\"\\n\",\n    \"            + \\\"{'API': [\\\"\\n\",\n    \"            + str(parsed_api)[1:-1]\\n\",\n    \"            + \\\"]},\\\"\\n\",\n    \"            + \\\"{'CLI': \\\"\\n\",\n    \"            + str(parsed_cli)\\n\",\n    \"            + \\\"},\\\"\\n\",\n    \"            + \\\"\\\"\\\"\\n\",\n    \"    \\\"LICENSE\\\",\\n\",\n    \"    \\\"CONTRIBUTING\\\",\\n\",\n    \"    \\\"CHANGELOG\\\",\\n\",\n    \"],\\n\",\n    \"};\\\"\\\"\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3743fd1c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"module.exports = {\\n\",\n      \"tutorialSidebar: [\\n\",\n      \"    'index', {'Guides': \\n\",\n      \"    [{'Writing services': ['guides/Guide_05_Lifespan_Handler', 'guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka']}, {'Testing': ['guides/Guide_31_Using_redpanda_to_test_fastkafka']}, {'Documentation generation': 
['guides/Guide_04_Github_Actions_Workflow']}]},{'API': ['api/fastkafka/FastKafka', 'api/fastkafka/KafkaEvent', {'encoder': ['api/fastkafka/encoder/AvroBase', 'api/fastkafka/encoder/json_decoder', 'api/fastkafka/encoder/json_encoder']}, {'testing': ['api/fastkafka/testing/ApacheKafkaBroker']}]},{'CLI': ['cli/fastkafka', 'cli/run_fastkafka_server_process']},\\n\",\n      \"    \\\"LICENSE\\\",\\n\",\n      \"    \\\"CONTRIBUTING\\\",\\n\",\n      \"    \\\"CHANGELOG\\\",\\n\",\n      \"],\\n\",\n      \"};\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"summary = \\\"\\\"\\\"- [FastKafka](index.md)\\n\",\n    \"- Guides\\n\",\n    \"    - Writing services\\n\",\n    \"        - [Lifespan Events](guides/Guide_05_Lifespan_Handler.md)\\n\",\n    \"        - [Encoding and Decoding Kafka Messages with FastKafka](guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md)\\n\",\n    \"    - Testing\\n\",\n    \"        - [Using Redpanda to test FastKafka](guides/Guide_31_Using_redpanda_to_test_fastkafka.md)\\n\",\n    \"    - Documentation generation\\n\",\n    \"        - [Deploy FastKafka docs to GitHub Pages](guides/Guide_04_Github_Actions_Workflow.md)\\n\",\n    \"- API\\n\",\n    \"    - fastkafka\\n\",\n    \"        - [FastKafka](api/fastkafka/FastKafka.md)\\n\",\n    \"        - [KafkaEvent](api/fastkafka/KafkaEvent.md)\\n\",\n    \"        - encoder\\n\",\n    \"            - [AvroBase](api/fastkafka/encoder/AvroBase.md)\\n\",\n    \"            - [json_decoder](api/fastkafka/encoder/json_decoder.md)\\n\",\n    \"            - [json_encoder](api/fastkafka/encoder/json_encoder.md)\\n\",\n    \"        - testing\\n\",\n    \"            - [ApacheKafkaBroker](api/fastkafka/testing/ApacheKafkaBroker.md)\\n\",\n    \"- CLI\\n\",\n    \"    - [fastkafka](cli/fastkafka.md)\\n\",\n    \"    - [run_fastkafka_server_process](cli/run_fastkafka_server_process.md)\\n\",\n    \"- [Releases](CHANGELOG.md)\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() 
as directory:\\n\",\n    \"    with open(directory + \\\"/SUMMARY.md\\\", \\\"w\\\") as stream:\\n\",\n    \"        stream.write(summary)\\n\",\n    \"\\n\",\n    \"    generate_sidebar(\\n\",\n    \"        summary_file=directory + \\\"/SUMMARY.md\\\", target=directory + \\\"/test.js\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    with open(directory + \\\"/test.js\\\") as stream:\\n\",\n    \"        stream = stream.read()\\n\",\n    \"\\n\",\n    \"print(stream)\\n\",\n    \"assert (\\n\",\n    \"    stream\\n\",\n    \"    == \\\"\\\"\\\"module.exports = {\\n\",\n    \"tutorialSidebar: [\\n\",\n    \"    'index', {'Guides': \\n\",\n    \"    [{'Writing services': ['guides/Guide_05_Lifespan_Handler', 'guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka']}, {'Testing': ['guides/Guide_31_Using_redpanda_to_test_fastkafka']}, {'Documentation generation': ['guides/Guide_04_Github_Actions_Workflow']}]},{'API': ['api/fastkafka/FastKafka', 'api/fastkafka/KafkaEvent', {'encoder': ['api/fastkafka/encoder/AvroBase', 'api/fastkafka/encoder/json_decoder', 'api/fastkafka/encoder/json_encoder']}, {'testing': ['api/fastkafka/testing/ApacheKafkaBroker']}]},{'CLI': ['cli/fastkafka', 'cli/run_fastkafka_server_process']},\\n\",\n    \"    \\\"LICENSE\\\",\\n\",\n    \"    \\\"CONTRIBUTING\\\",\\n\",\n    \"    \\\"CHANGELOG\\\",\\n\",\n    \"],\\n\",\n    \"};\\\"\\\"\\\"\\n\",\n    \"), stream\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"975a9af5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def _get_markdown_filenames_from_sidebar(sidebar_file_path: str) -> List[str]:\\n\",\n    \"    \\\"\\\"\\\"Get a list of Markdown filenames included in the sidebar.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        sidebar_file_path: The path to the sidebar file.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A list of Markdown filenames 
included in the sidebar.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    with open(sidebar_file_path, \\\"r\\\") as file:\\n\",\n    \"        file_content = file.read()\\n\",\n    \"\\n\",\n    \"        pattern = r\\\"tutorialSidebar:\\\\s*(\\\\[.*\\\\])\\\\s*,\\\\s*\\\\n?\\\\s*};\\\"\\n\",\n    \"        match = re.search(pattern, file_content, re.DOTALL)\\n\",\n    \"        all_sidebar_files = ast.literal_eval(match.group(1)) if match else []\\n\",\n    \"        markdown_filenames = [\\n\",\n    \"            f\\\"{v}.md\\\" for v in all_sidebar_files if isinstance(v, str)\\n\",\n    \"        ]\\n\",\n    \"        return markdown_filenames\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"711b627a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"['CONTRIBUTING.md', 'LICENSE.md', 'index.md']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    docs_path = Path(d) / \\\"docusaurus\\\" / \\\"docs\\\"\\n\",\n    \"    docs_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    sidebar_file_path = Path(d) / \\\"docusaurus\\\" / \\\"sidebar.js\\\"\\n\",\n    \"    with open(sidebar_file_path, \\\"w\\\") as f:\\n\",\n    \"        f.write(\\n\",\n    \"            \\\"\\\"\\\"module.exports = {\\n\",\n    \"tutorialSidebar: [\\n\",\n    \"    'index', {'Guides': \\n\",\n    \"    [{'Writing services': ['guides/Guide_05_Lifespan_Handler', 'guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka']}, {'Testing': ['guides/Guide_31_Using_redpanda_to_test_fastkafka']}, {'Documentation generation': ['guides/Guide_04_Github_Actions_Workflow']}]},{'API': ['api/fastkafka/FastKafka', 'api/fastkafka/KafkaEvent', {'encoder': ['api/fastkafka/encoder/AvroBase', 'api/fastkafka/encoder/json_decoder', 'api/fastkafka/encoder/json_encoder']}, {'testing': 
['api/fastkafka/testing/ApacheKafkaBroker']}]},{'CLI': ['cli/fastkafka', 'cli/run_fastkafka_server_process']},\\n\",\n    \"    \\\"LICENSE\\\",\\n\",\n    \"    \\\"CONTRIBUTING\\\",\\n\",\n    \"],\\n\",\n    \"};\\\"\\\"\\\"\\n\",\n    \"        )\\n\",\n    \"        \\n\",\n    \"    expected = [\\\"index.md\\\", \\\"LICENSE.md\\\", \\\"CONTRIBUTING.md\\\"]\\n\",\n    \"    actual = _get_markdown_filenames_from_sidebar(str(sidebar_file_path))\\n\",\n    \"    print(sorted(actual))\\n\",\n    \"    assert sorted(actual) == sorted(expected), actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d52d7808\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _delete_files(files: List[Path]) -> None:\\n\",\n    \"    \\\"\\\"\\\"Deletes a list of files.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        files: A list of Path objects representing the files to be deleted.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        OSError: If an error occurs while deleting a file.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    for file in files:\\n\",\n    \"        try:\\n\",\n    \"            file.unlink()\\n\",\n    \"        except OSError as e:\\n\",\n    \"            typer.echo(\\n\",\n    \"                f\\\"Error deleting files from docusaurus/docs directory. Could not delete file: {file} - {e}\\\"\\n\",\n    \"            )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b09c055e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Error deleting files from docusaurus/docs directory. 
Could not delete file: /tmp/tmpor98b7dh/docusaurus/docs/t.txt - [Errno 2] No such file or directory: '/tmp/tmpor98b7dh/docusaurus/docs/t.txt'\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    sample_file = Path(d) / \\\"docusaurus\\\" / \\\"docs\\\" / \\\"t.txt\\\"\\n\",\n    \"    _delete_files([sample_file])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"33356232\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    docs_path = Path(d) / \\\"docusaurus\\\" / \\\"docs\\\"\\n\",\n    \"    docs_path.mkdir(parents=True)\\n\",\n    \"    \\n\",\n    \"    sample_file = docs_path / \\\"file.md\\\"\\n\",\n    \"    with open(sample_file, \\\"w\\\") as f:\\n\",\n    \"        f.write(\\\"sample text\\\")\\n\",\n    \"\\n\",\n    \"    _delete_files([sample_file])\\n\",\n    \"    actual = [file_path for file_path in Path(docs_path).glob(\\\"*.md\\\")]\\n\",\n    \"    print(actual)\\n\",\n    \"    assert actual == []\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a8a8e858\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def delete_unused_markdown_files_from_sidebar(\\n\",\n    \"    docs_path: str, sidebar_file_path: str\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"Delete the markdown files from the docs directory that are not present in the sidebar.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        docs_path: Path to the directory containing the markdown files.\\n\",\n    \"        sidebar_file_path: Path to the sidebar file.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    md_filenames_in_sidebar = _get_markdown_filenames_from_sidebar(\\n\",\n    \"        
str(sidebar_file_path)\\n\",\n    \"    )\\n\",\n    \"    if len(md_filenames_in_sidebar) > 0:\\n\",\n    \"        all_md_files_in_docs_dir = [\\n\",\n    \"            file_path for file_path in Path(docs_path).glob(\\\"*.md\\\")\\n\",\n    \"        ]\\n\",\n    \"        md_files_in_sidebar = [Path(docs_path) / f for f in md_filenames_in_sidebar]\\n\",\n    \"        md_files_to_delete = list(\\n\",\n    \"            set(all_md_files_in_docs_dir) - set(md_files_in_sidebar)\\n\",\n    \"        )\\n\",\n    \"        _delete_files(md_files_to_delete)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"383051ae\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[Path('/tmp/tmp4lrwkf_6/docusaurus/docs/CONTRIBUTING.md'), Path('/tmp/tmp4lrwkf_6/docusaurus/docs/LICENSE.md'), Path('/tmp/tmp4lrwkf_6/docusaurus/docs/index.md')]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    docs_path = Path(d) / \\\"docusaurus\\\" / \\\"docs\\\"\\n\",\n    \"    docs_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    api_path = docs_path / \\\"api\\\"\\n\",\n    \"    api_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    nested_api_path = api_path / \\\"fastKafka\\\"\\n\",\n    \"    nested_api_path.mkdir(parents=True)\\n\",\n    \"\\n\",\n    \"    for p in [api_path, nested_api_path]:\\n\",\n    \"        with open((p / \\\"file.md\\\"), \\\"w\\\") as f:\\n\",\n    \"            f.write(\\\"sample text\\\")\\n\",\n    \"\\n\",\n    \"    markdown_files = [\\n\",\n    \"        docs_path / \\\"index.md\\\",\\n\",\n    \"        docs_path / \\\"LICENSE.md\\\",\\n\",\n    \"        docs_path / \\\"CONTRIBUTING.md\\\",\\n\",\n    \"        docs_path / \\\"Non_sidebar_file_1.md\\\",\\n\",\n    \"        docs_path / \\\"Non_sidebar_file_2.md\\\",\\n\",\n    \"        docs_path / 
\\\"Non_sidebar_file_3.md\\\",\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    for f in markdown_files:\\n\",\n    \"        with open(f, \\\"w\\\") as f:\\n\",\n    \"            f.write(\\\"sample text\\\")\\n\",\n    \"\\n\",\n    \"    sidebar_file_path = Path(d) / \\\"docusaurus\\\" / \\\"sidebar.js\\\"\\n\",\n    \"    with open(sidebar_file_path, \\\"w\\\") as f:\\n\",\n    \"        f.write(\\n\",\n    \"            \\\"\\\"\\\"module.exports = {\\n\",\n    \"tutorialSidebar: [\\n\",\n    \"    'index', {'Guides': \\n\",\n    \"    [{'Writing services': ['guides/Guide_05_Lifespan_Handler', 'guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka']}, {'Testing': ['guides/Guide_31_Using_redpanda_to_test_fastkafka']}, {'Documentation generation': ['guides/Guide_04_Github_Actions_Workflow']}]},{'API': ['api/fastkafka/FastKafka', 'api/fastkafka/KafkaEvent', {'encoder': ['api/fastkafka/encoder/AvroBase', 'api/fastkafka/encoder/json_decoder', 'api/fastkafka/encoder/json_encoder']}, {'testing': ['api/fastkafka/testing/ApacheKafkaBroker']}]},{'CLI': ['cli/fastkafka', 'cli/run_fastkafka_server_process']},\\n\",\n    \"    \\\"LICENSE\\\",\\n\",\n    \"    \\\"CONTRIBUTING\\\",\\n\",\n    \"],\\n\",\n    \"};\\\"\\\"\\\"\\n\",\n    \"        )\\n\",\n    \"        \\n\",\n    \"    expected = [\\n\",\n    \"        docs_path / \\\"index.md\\\",\\n\",\n    \"        docs_path / \\\"LICENSE.md\\\",\\n\",\n    \"        docs_path / \\\"CONTRIBUTING.md\\\",\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    delete_unused_markdown_files_from_sidebar(str(docs_path), str(sidebar_file_path))\\n\",\n    \"    actual = [file_path for file_path in Path(docs_path).glob(\\\"*.md\\\")]\\n\",\n    \"    print(sorted(actual))\\n\",\n    \"    assert sorted(actual) == sorted(expected), actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"18b20253\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | 
export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def update_readme() -> None:\\n\",\n    \"    \\\"\\\"\\\"Update the readme file and fix the symbol links\\\"\\\"\\\"\\n\",\n    \"    cfg = get_config()\\n\",\n    \"    readme_path = cfg.config_path / \\\"README.md\\\"\\n\",\n    \"    nbdev_readme.__wrapped__()\\n\",\n    \"\\n\",\n    \"    with open(readme_path, \\\"r\\\", encoding=\\\"utf-8\\\") as f:\\n\",\n    \"        contents = f.read()\\n\",\n    \"\\n\",\n    \"    contents = update_default_symbol_links(\\n\",\n    \"        contents, NbdevLookup(incl_libs=cfg.lib_path.name), \\\"\\\", \\\"\\\", False\\n\",\n    \"    )\\n\",\n    \"    contents = _fix_symbol_links(contents, \\\"./\\\", cfg.doc_host, cfg.doc_baseurl, False)\\n\",\n    \"\\n\",\n    \"    with open(readme_path, \\\"w\\\", encoding=\\\"utf-8\\\") as f:\\n\",\n    \"        f.write(contents)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d591af2d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Next, an object of the\\n\",\n      \"    [`FastKafka`](https://airtai.github.io/fastkafka/fastkafka.html#fastkafka)\\n\",\n      \"    class is initialized with the minimum set of arguments.\\n\",\n      \"    The service can be tested using the [`Tester`](https://airtai.github.io/fastkafka/tester.html#tester)\\n\",\n      \"    instances\\n\",\n      \"    \\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"@contextmanager\\n\",\n    \"def mock_nbdev_readme(mock_contents, d):\\n\",\n    \"    with patch('__main__.nbdev_readme') as mock_nbdev_readme:\\n\",\n    \"        mock_nbdev_readme.__wrapped__ = MagicMock()\\n\",\n    \"        with open((Path(d) / \\\"README.md\\\"), \\\"w\\\", encoding=\\\"utf-8\\\") as f:\\n\",\n    \"            f.write(mock_contents)\\n\",\n    \"        \\n\",\n    \"        yield\\n\",\n    \"        \\n\",\n    \"        \\n\",\n    
\"\\n\",\n    \"_mock_nbdev_readme_return_value = \\\"\\\"\\\"Next, an object of the\\n\",\n    \"    [`FastKafka`](https://airtai.github.io/fastkafka/fastkafka.html#fastkafka)\\n\",\n    \"    class is initialized with the minimum set of arguments.\\n\",\n    \"    The service can be tested using the [`Tester`](https://airtai.github.io/fastkafka/tester.html#tester)\\n\",\n    \"    instances\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    with mock_nbdev_readme(_mock_nbdev_readme_return_value, d):\\n\",\n    \"        nbdev_readme.__wrapped__()\\n\",\n    \"        \\n\",\n    \"        with open((Path(d) / \\\"README.md\\\"), \\\"r\\\", encoding=\\\"utf-8\\\") as f:\\n\",\n    \"            contents = f.read()\\n\",\n    \"        \\n\",\n    \"        print(contents)\\n\",\n    \"        assert contents == _mock_nbdev_readme_return_value\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"190b0190\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"/tmp/tmp0a889_8g\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"class dotdict(dict):\\n\",\n    \"    \\\"\\\"\\\"dot.notation access to dictionary attributes\\\"\\\"\\\"\\n\",\n    \"    __getattr__ = dict.get\\n\",\n    \"    __setattr__ = dict.__setitem__\\n\",\n    \"    __delattr__ = dict.__delitem__\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@contextmanager\\n\",\n    \"def mock_get_config(d):\\n\",\n    \"    with patch(\\\"__main__.get_config\\\") as mock_get_config:\\n\",\n    \"        d = {\\n\",\n    \"            \\\"config_path\\\": Path(d),\\n\",\n    \"            \\\"doc_host\\\": \\\"https://airtai.github.io\\\",\\n\",\n    \"            \\\"doc_baseurl\\\": \\\"/fastkafka\\\",\\n\",\n    \"            \\\"lib_path\\\": Path(d) / \\\"fastkafka\\\",\\n\",\n    \"        }\\n\",\n    \"        
mock_get_config.return_value = dotdict(d)\\n\",\n    \"        yield\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    with mock_get_config(d):\\n\",\n    \"        cfg = get_config()\\n\",\n    \"\\n\",\n    \"        print(cfg.config_path)\\n\",\n    \"        assert cfg.config_path == Path(d)\\n\",\n    \"        assert cfg.doc_host == \\\"https://airtai.github.io\\\"\\n\",\n    \"        assert cfg.lib_path.name == \\\"fastkafka\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e6d48549\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Next, an object of the\\n\",\n      \"    [`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\n\",\n      \"    class is initialized with the minimum set of arguments.\\n\",\n      \"    The service can be tested using the [`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n      \"    instances\\n\",\n      \"    \\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"fixture = \\\"\\\"\\\"Next, an object of the\\n\",\n    \"    [`FastKafka`](https://airtai.github.io/fastkafka/fastkafka.html#fastkafka)\\n\",\n    \"    class is initialized with the minimum set of arguments.\\n\",\n    \"    The service can be tested using the [`Tester`](https://airtai.github.io/fastkafka/tester.html#tester)\\n\",\n    \"    instances\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"Next, an object of the\\n\",\n    \"    [`FastKafka`](https://airtai.github.io/fastkafka/docs/api/fastkafka#fastkafka.FastKafka)\\n\",\n    \"    class is initialized with the minimum set of arguments.\\n\",\n    \"    The service can be tested using the [`Tester`](https://airtai.github.io/fastkafka/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester)\\n\",\n    
\"    instances\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    readme_path = Path(d) / \\\"README.md\\\"\\n\",\n    \"    with mock_get_config(d):\\n\",\n    \"        with mock_nbdev_readme(fixture, d):\\n\",\n    \"            update_readme()\\n\",\n    \"\\n\",\n    \"        with open(readme_path, \\\"r\\\", encoding=\\\"utf-8\\\") as f:\\n\",\n    \"            actual = f.read()\\n\",\n    \"        print(actual)\\n\",\n    \"        assert actual == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"794269b1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python 3 (ipykernel)\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.11.4\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/096_Meta.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8690ce9e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.meta\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4f538d5f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import builtins\\n\",\n    \"import copy as cp\\n\",\n    \"import functools\\n\",\n    \"import inspect\\n\",\n    \"import sys\\n\",\n    \"import types\\n\",\n    \"from functools import partial, wraps\\n\",\n    \"from types import *\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import docstring_parser\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6c59ed2b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from contextlib import contextmanager\\n\",\n    \"\\n\",\n    \"from fastkafka._aiokafka_imports import AIOKafkaConsumer\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e49f29f9\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Fastcore replacement: patch & delegates\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9fcc9993\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# |exporti\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def test_eq(a: Any, b: Any) -> None:\\n\",\n    \"    \\\"`test` that `a==b`\\\"\\n\",\n    \"    if a != b:\\n\",\n    \"        raise ValueError(f\\\"{a} != {b}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3dd42876\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Patching\\n\",\n    \"\\n\",\n    \"> copied from https://github.com/fastai/fastcore/blob/master/nbs/01_basics.ipynb\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a621b3e2\",\n   \"metadata\": {},\n   
\"outputs\": [],\n   \"source\": [\n    \"# |exporti\\n\",\n    \"F = TypeVar(\\\"F\\\", bound=Callable[..., Any])\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def copy_func(f: Union[F, FunctionType]) -> Union[F, FunctionType]:\\n\",\n    \"    \\\"Copy a non-builtin function (NB `copy.copy` does not work for this)\\\"\\n\",\n    \"    if not isinstance(f, FunctionType):\\n\",\n    \"        return cp.copy(f)\\n\",\n    \"    fn = FunctionType(\\n\",\n    \"        f.__code__, f.__globals__, f.__name__, f.__defaults__, f.__closure__\\n\",\n    \"    )\\n\",\n    \"    fn.__kwdefaults__ = f.__kwdefaults__\\n\",\n    \"    fn.__dict__.update(f.__dict__)\\n\",\n    \"    fn.__annotations__.update(f.__annotations__)\\n\",\n    \"    fn.__qualname__ = f.__qualname__\\n\",\n    \"    fn.__doc__ = f.__doc__\\n\",\n    \"    return fn\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8d465db0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def foo():\\n\",\n    \"    \\\"\\\"\\\"Test doc\\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"a = cp.copy(foo)\\n\",\n    \"b = cp.deepcopy(foo)\\n\",\n    \"\\n\",\n    \"a.someattr = \\\"hello\\\"  # since a and b point at the same object, updating a will update b\\n\",\n    \"test_eq(b.someattr, \\\"hello\\\")\\n\",\n    \"\\n\",\n    \"assert a is foo and b is foo\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"739b940f\",\n   \"metadata\": {},\n   \"source\": [\n    \"However, with copy_func, you can retrieve a copy of a function without a reference to the original object:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"db9c3670\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"c = copy_func(foo)  # c is an indpendent object\\n\",\n    \"assert c is not foo\\n\",\n    \"assert c.__doc__ == \\\"\\\"\\\"Test doc\\\"\\\"\\\", c.__doc__\"\n   ]\n  },\n  {\n   
\"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"88934909\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def g(x, *, y=3):\\n\",\n    \"    return x + y\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"test_eq(copy_func(g)(4), 7)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"00f7e9ca\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# |exporti\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def patch_to(\\n\",\n    \"    cls: Union[Type, Iterable[Type]], as_prop: bool = False, cls_method: bool = False\\n\",\n    \") -> Callable[[F], F]:\\n\",\n    \"    \\\"Decorator: add `f` to `cls`\\\"\\n\",\n    \"    if not isinstance(cls, (tuple, list)):\\n\",\n    \"        cls = (cls,)  # type: ignore\\n\",\n    \"\\n\",\n    \"    def _inner(f: F) -> F:\\n\",\n    \"        for c_ in cls:\\n\",\n    \"            nf = copy_func(f)\\n\",\n    \"            nm = f.__name__\\n\",\n    \"            # `functools.update_wrapper` when passing patched function to `Pipeline`, so we do it manually\\n\",\n    \"            for o in functools.WRAPPER_ASSIGNMENTS:\\n\",\n    \"                setattr(nf, o, getattr(f, o))\\n\",\n    \"            nf.__qualname__ = f\\\"{c_.__name__}.{nm}\\\"\\n\",\n    \"            if cls_method:\\n\",\n    \"                setattr(c_, nm, MethodType(nf, c_))\\n\",\n    \"            else:\\n\",\n    \"                setattr(c_, nm, property(nf) if as_prop else nf)\\n\",\n    \"        # Avoid clobbering existing functions\\n\",\n    \"        # nosemgrep\\n\",\n    \"        existing_func = globals().get(nm, builtins.__dict__.get(nm, None))\\n\",\n    \"        return existing_func  # type: ignore\\n\",\n    \"\\n\",\n    \"    return _inner\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ee9ab22b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T3(int):\\n\",\n    \"    
pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch_to(_T3)\\n\",\n    \"def foo(self):\\n\",\n    \"    \\\"\\\"\\\"Test doc\\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert _T3.foo.__doc__ == \\\"\\\"\\\"Test doc\\\"\\\"\\\", foo.__doc__\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"740507c5\",\n   \"metadata\": {},\n   \"source\": [\n    \"     \\n\",\n    \"The @patch_to decorator allows you to monkey patch a function into a class as a method:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ce7b854a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T3(int):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch_to(_T3)\\n\",\n    \"def func1(self, a):\\n\",\n    \"    return self + a\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = _T3(1)  # we initilized `t` to a type int = 1\\n\",\n    \"test_eq(t.func1(2), 3)  # we add 2 to `t`, so 2 + 1 = 3\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d73911c9\",\n   \"metadata\": {},\n   \"source\": [\n    \"     \\n\",\n    \"You can access instance properties in the usual way via self:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7dfcbf2d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T4:\\n\",\n    \"    def __init__(self, g):\\n\",\n    \"        self.g = g\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch_to(_T4)\\n\",\n    \"def greet(self, x):\\n\",\n    \"    return self.g + x\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = _T4(\\\"hello \\\")  # this sets self.g = 'helllo '\\n\",\n    \"test_eq(\\n\",\n    \"    t.greet(\\\"world\\\"), \\\"hello world\\\"\\n\",\n    \")  # t.greet('world') will append 'world' to 'hello '\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"5d6df22e\",\n   \"metadata\": {},\n   \"source\": [\n    \"     \\n\",\n    \"You can instead specify 
that the method should be a class method by setting cls_method=True:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3844687b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T5(int):\\n\",\n    \"    attr = 3  # attr is a class attribute we will access in a later method\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch_to(_T5, cls_method=True)\\n\",\n    \"def func(cls, x):\\n\",\n    \"    return cls.attr + x  # you can access class attributes in the normal way\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"test_eq(_T5.func(4), 7)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d4445ccf\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Additionally you can specify that the function you want to patch should be a class attribute with as_prop=True:\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch_to(_T5, as_prop=True)\\n\",\n    \"def add_ten(self):\\n\",\n    \"    return self + 10\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = _T5(4)\\n\",\n    \"test_eq(t.add_ten, 14)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4a1b91e7\",\n   \"metadata\": {},\n   \"source\": [\n    \"     \\n\",\n    \"Instead of passing one class to the @patch_to decorator, you can pass multiple classes in a tuple to simulteanously patch more than one class with the same method:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5a626f18\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T6(int):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class _T7(int):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch_to((_T6, _T7))\\n\",\n    \"def func_mult(self, a):\\n\",\n    \"    return self * a\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = _T6(2)\\n\",\n    \"test_eq(t.func_mult(4), 8)\\n\",\n    \"t = _T7(2)\\n\",\n    \"test_eq(t.func_mult(4), 
8)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"89a579b8\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | exporti\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def eval_type(\\n\",\n    \"    t: Sequence, glb: Optional[Dict[str, Any]], loc: Optional[Mapping[str, object]]\\n\",\n    \") -> Any:\\n\",\n    \"    \\\"`eval` a type or collection of types, if needed, for annotations in py3.10+\\\"\\n\",\n    \"    if isinstance(t, str):\\n\",\n    \"        if \\\"|\\\" in t:\\n\",\n    \"            return Union[eval_type(tuple(t.split(\\\"|\\\")), glb, loc)]\\n\",\n    \"        # nosemgrep\\n\",\n    \"        return eval(t, glb, loc)  # nosec B307:blacklist\\n\",\n    \"    if isinstance(t, (tuple, list)):\\n\",\n    \"        return type(t)([eval_type(c, glb, loc) for c in t])\\n\",\n    \"    return t\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def union2tuple(t) -> Tuple[Any, ...]:  # type: ignore\\n\",\n    \"    if getattr(t, \\\"__origin__\\\", None) is Union:\\n\",\n    \"        return t.__args__  # type: ignore\\n\",\n    \"\\n\",\n    \"    if sys.version_info >= (3, 10):\\n\",\n    \"        if isinstance(t, UnionType):\\n\",\n    \"            return t.__args__\\n\",\n    \"\\n\",\n    \"    return t  # type: ignore\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_annotations_ex(\\n\",\n    \"    obj: Union[FunctionType, Type, F],\\n\",\n    \"    *,\\n\",\n    \"    globals: Optional[Dict[str, Any]] = None,\\n\",\n    \"    locals: Optional[Dict[str, Any]] = None,\\n\",\n    \") -> Tuple[Dict[str, Any], Union[Any, Dict[str, Any], None], Dict[str, Any]]:\\n\",\n    \"    \\\"Backport of py3.10 `get_annotations` that returns globals/locals\\\"\\n\",\n    \"    if isinstance(obj, type):\\n\",\n    \"        obj_dict = getattr(obj, \\\"__dict__\\\", None)\\n\",\n    \"        if obj_dict and hasattr(obj_dict, \\\"get\\\"):\\n\",\n    \"            ann = obj_dict.get(\\\"__annotations__\\\", 
None)\\n\",\n    \"            if isinstance(ann, types.GetSetDescriptorType):\\n\",\n    \"                ann = None\\n\",\n    \"        else:\\n\",\n    \"            ann = None\\n\",\n    \"\\n\",\n    \"        obj_globals = None\\n\",\n    \"        module_name = getattr(obj, \\\"__module__\\\", None)\\n\",\n    \"        if module_name:\\n\",\n    \"            module = sys.modules.get(module_name, None)\\n\",\n    \"            if module:\\n\",\n    \"                obj_globals = getattr(module, \\\"__dict__\\\", None)\\n\",\n    \"        obj_locals = dict(vars(obj))\\n\",\n    \"        unwrap = obj\\n\",\n    \"    elif isinstance(obj, types.ModuleType):\\n\",\n    \"        ann = getattr(obj, \\\"__annotations__\\\", None)\\n\",\n    \"        obj_globals = getattr(obj, \\\"__dict__\\\")\\n\",\n    \"        obj_locals, unwrap = None, None\\n\",\n    \"    elif callable(obj):\\n\",\n    \"        ann = getattr(obj, \\\"__annotations__\\\", None)\\n\",\n    \"        obj_globals = getattr(obj, \\\"__globals__\\\", None)\\n\",\n    \"        obj_locals, unwrap = None, obj  # type: ignore\\n\",\n    \"    else:\\n\",\n    \"        raise TypeError(f\\\"{obj!r} is not a module, class, or callable.\\\")\\n\",\n    \"\\n\",\n    \"    if ann is None:\\n\",\n    \"        ann = {}\\n\",\n    \"    if not isinstance(ann, dict):\\n\",\n    \"        raise ValueError(f\\\"{obj!r}.__annotations__ is neither a dict nor None\\\")\\n\",\n    \"    if not ann:\\n\",\n    \"        ann = {}\\n\",\n    \"\\n\",\n    \"    if unwrap is not None:\\n\",\n    \"        while True:\\n\",\n    \"            if hasattr(unwrap, \\\"__wrapped__\\\"):\\n\",\n    \"                unwrap = unwrap.__wrapped__\\n\",\n    \"                continue\\n\",\n    \"            if isinstance(unwrap, functools.partial):\\n\",\n    \"                unwrap = unwrap.func  # type: ignore\\n\",\n    \"                continue\\n\",\n    \"            break\\n\",\n    \"        if 
hasattr(unwrap, \\\"__globals__\\\"):\\n\",\n    \"            obj_globals = unwrap.__globals__\\n\",\n    \"\\n\",\n    \"    if globals is None:\\n\",\n    \"        globals = obj_globals\\n\",\n    \"    if locals is None:\\n\",\n    \"        locals = obj_locals\\n\",\n    \"\\n\",\n    \"    return dict(ann), globals, locals  # type: ignore\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"615f96d9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def patch(  # type: ignore\\n\",\n    \"    f: Optional[F] = None, *, as_prop: bool = False, cls_method: bool = False\\n\",\n    \"):\\n\",\n    \"    \\\"Decorator: add `f` to the first parameter's class (based on f's type annotations)\\\"\\n\",\n    \"    if f is None:\\n\",\n    \"        return partial(patch, as_prop=as_prop, cls_method=cls_method)\\n\",\n    \"    ann, glb, loc = get_annotations_ex(f)\\n\",\n    \"    cls = union2tuple(\\n\",\n    \"        eval_type(ann.pop(\\\"cls\\\") if cls_method else next(iter(ann.values())), glb, loc)\\n\",\n    \"    )\\n\",\n    \"    return patch_to(cls, as_prop=as_prop, cls_method=cls_method)(f)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"83a65ab4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T8(int):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def func(self: _T8, a):\\n\",\n    \"    \\\"\\\"\\\"Test doc\\\"\\\"\\\"\\n\",\n    \"    return self + a\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert _T8.func.__doc__ == \\\"\\\"\\\"Test doc\\\"\\\"\\\", func.__doc__\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e45c054c\",\n   \"metadata\": {},\n   \"source\": [\n    \"     \\n\",\n    \"@patch is an alternative to @patch_to that allows you similarly monkey patch class(es) by using type annotations:\"\n   ]\n  },\n  {\n  
 \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1d5f6b0e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T8(int):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def func(self: _T8, a):\\n\",\n    \"    return self + a\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = _T8(1)  # we initilized `t` to a type int = 1\\n\",\n    \"test_eq(t.func(3), 4)  # we add 3 to `t`, so 3 + 1 = 4\\n\",\n    \"test_eq(t.func.__qualname__, \\\"_T8.func\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"dae3414e\",\n   \"metadata\": {},\n   \"source\": [\n    \"Similarly to patch_to, you can supply a union of classes instead of a single class in your type annotations to patch multiple classes:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"367a359d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T9(int):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch\\n\",\n    \"def func2(x: Union[_T8, _T9], a):\\n\",\n    \"    return x * a  # will patch both _T8 and _T9\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = _T8(2)\\n\",\n    \"test_eq(t.func2(4), 8)\\n\",\n    \"test_eq(t.func2.__qualname__, \\\"_T8.func2\\\")\\n\",\n    \"\\n\",\n    \"t = _T9(2)\\n\",\n    \"test_eq(t.func2(4), 8)\\n\",\n    \"test_eq(t.func2.__qualname__, \\\"_T9.func2\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"6dbb3d5a\",\n   \"metadata\": {},\n   \"source\": [\n    \"     \\n\",\n    \"Just like patch_to decorator you can use as_prop and cls_method parameters with patch decorator:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1eed1b2b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@patch(as_prop=True)\\n\",\n    \"def add_ten(self: _T5):\\n\",\n    \"    return self + 10\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = 
_T5(4)\\n\",\n    \"test_eq(t.add_ten, 14)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a1a6407\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class _T5(int):\\n\",\n    \"    attr = 3  # attr is a class attribute we will access in a later method\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@patch(cls_method=True)\\n\",\n    \"def func(cls: _T5, x):\\n\",\n    \"    return cls.attr + x  # you can access class attributes in the normal way\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"test_eq(_T5.func(4), 7)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"391aec1c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"95cafec7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def test_sig(f, b):\\n\",\n    \"    \\\"Test the signature of an object\\\"\\n\",\n    \"    if str(inspect.signature(f)) != b:\\n\",\n    \"        raise ValueError(f\\\"{inspect.signature(f)} != {b}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"855fdd0a\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Fastcore meta deps\\n\",\n    \"\\n\",\n    \"> Copied from https://github.com/fastai/fastcore/blob/master/nbs/07_meta.ipynb\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6bb9c8b3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# |export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _delegates_without_docs(\\n\",\n    \"    to: Optional[F] = None,  # Delegatee\\n\",\n    \"    keep: bool = False,  # Keep `kwargs` in decorated function?\\n\",\n    \"    but: Optional[List[str]] = None,  # Exclude these parameters from 
signature\\n\",\n    \") -> Callable[[F], F]:\\n\",\n    \"    \\\"Decorator: replace `**kwargs` in signature with params from `to`\\\"\\n\",\n    \"    if but is None:\\n\",\n    \"        but = []\\n\",\n    \"\\n\",\n    \"    def _f(f: F) -> F:\\n\",\n    \"        if to is None:\\n\",\n    \"            to_f, from_f = f.__base__.__init__, f.__init__  # type: ignore\\n\",\n    \"        else:\\n\",\n    \"            to_f, from_f = to.__init__ if isinstance(to, type) else to, f  # type: ignore\\n\",\n    \"        from_f = getattr(from_f, \\\"__func__\\\", from_f)\\n\",\n    \"        to_f = getattr(to_f, \\\"__func__\\\", to_f)\\n\",\n    \"        if hasattr(from_f, \\\"__delwrap__\\\"):\\n\",\n    \"            return f\\n\",\n    \"        sig = inspect.signature(from_f)\\n\",\n    \"        sigd = dict(sig.parameters)\\n\",\n    \"        if \\\"kwargs\\\" in sigd:\\n\",\n    \"            k = sigd.pop(\\\"kwargs\\\")\\n\",\n    \"        else:\\n\",\n    \"            k = None\\n\",\n    \"        s2 = {\\n\",\n    \"            k: v.replace(kind=inspect.Parameter.KEYWORD_ONLY)\\n\",\n    \"            for k, v in inspect.signature(to_f).parameters.items()\\n\",\n    \"            if v.default != inspect.Parameter.empty and k not in sigd and k not in but  # type: ignore\\n\",\n    \"        }\\n\",\n    \"        anno = {\\n\",\n    \"            k: v\\n\",\n    \"            for k, v in getattr(to_f, \\\"__annotations__\\\", {}).items()\\n\",\n    \"            if k not in sigd and k not in but  # type: ignore\\n\",\n    \"        }\\n\",\n    \"        sigd.update(s2)\\n\",\n    \"        if keep and k is not None:\\n\",\n    \"            sigd[\\\"kwargs\\\"] = k\\n\",\n    \"        else:\\n\",\n    \"            from_f.__delwrap__ = to_f\\n\",\n    \"        from_f.__signature__ = sig.replace(parameters=list(sigd.values()))\\n\",\n    \"        if hasattr(from_f, \\\"__annotations__\\\"):\\n\",\n    \"            
from_f.__annotations__.update(anno)\\n\",\n    \"        return f\\n\",\n    \"\\n\",\n    \"    return _f\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8cb1c2e5\",\n   \"metadata\": {},\n   \"source\": [\n    \"A common Python idiom is to accept **kwargs in addition to named parameters that are passed onto other function calls. It is especially common to use **kwargs when you want to give the user an option to override default parameters of any functions or methods being called by the parent function.\\n\",\n    \"\\n\",\n    \"For example, suppose we have have a function foo that passes arguments to baz like so:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9a80423a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def baz(a, b: int = 2, c: int = 3) -> int:\\n\",\n    \"    \\\"\\\"\\\"Baz\\n\",\n    \"    Params:\\n\",\n    \"        a: something\\n\",\n    \"        b: whatever\\n\",\n    \"        c: whocares\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Nada\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return a + b + c\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def foo(c, a, **kwargs):\\n\",\n    \"    return c + baz(a, **kwargs)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert foo(c=1, a=1) == 7\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c9acda69\",\n   \"metadata\": {},\n   \"source\": [\n    \"The problem with this approach is the api for foo is obfuscated. Users cannot introspect what the valid arguments for **kwargs are without reading the source code. 
When a user tries to introspect
For example, if we apply the delegates decorator to foo to include parameters from baz:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5de7978e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<Signature (c, a, *, b: int = 2)>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"@_delegates_without_docs(baz)\\n\",\n    \"def foo(c, a, **kwargs):\\n\",\n    \"    \\\"\\\"\\\"Foo is great\\n\",\n    \"\\n\",\n    \"    Params:\\n\",\n    \"        c: c from foo\\n\",\n    \"        a: a from foo\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return c + baz(a, **kwargs)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"test_sig(foo, \\\"(c, a, *, b: int = 2)\\\")\\n\",\n    \"assert (\\n\",\n    \"    foo.__doc__\\n\",\n    \"    == \\\"Foo is great\\\\n\\\\n    Params:\\\\n        c: c from foo\\\\n        a: a from foo\\\\n    \\\"\\n\",\n    \")\\n\",\n    \"inspect.signature(foo)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dd13be32\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"'Foo is great\\\\n\\\\n    Params:\\\\n        c: c from foo\\\\n        a: a from foo\\\\n    '\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"foo.__doc__\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5d4b88e5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _format_args(xs: List[docstring_parser.DocstringParam]) -> str:\\n\",\n    \"    return \\\"\\\\nArgs:\\\\n - \\\" + \\\"\\\\n - \\\".join(\\n\",\n    \"        [f\\\"{x.arg_name} ({x.type_name}): 
{x.description}\\\" for x in xs]\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def combine_params(\\n\",\n    \"    f: F, o: Union[Type, Callable[..., Any]], but: Optional[List[str]] = None\\n\",\n    \") -> F:\\n\",\n    \"    \\\"\\\"\\\"Combines docstring arguments of a function and another object or function\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        f: destination functions where combined arguments will end up\\n\",\n    \"        o: source function from which arguments are taken from\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Function f with augumented docstring including arguments from both functions/objects\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if but is None:\\n\",\n    \"        but = []\\n\",\n    \"\\n\",\n    \"    src_params = docstring_parser.parse_from_object(o).params\\n\",\n    \"    #     logger.info(f\\\"combine_params(): source:{_format_args(src_params)}\\\")\\n\",\n    \"    docs = docstring_parser.parse_from_object(f)\\n\",\n    \"    #     logger.info(f\\\"combine_params(): destination:{_format_args(docs.params)}\\\")\\n\",\n    \"    dst_params_names = [p.arg_name for p in docs.params]\\n\",\n    \"\\n\",\n    \"    combined_params = docs.params + [\\n\",\n    \"        x\\n\",\n    \"        for x in src_params\\n\",\n    \"        if x.arg_name not in dst_params_names and x.arg_name not in but\\n\",\n    \"    ]\\n\",\n    \"    #     logger.info(f\\\"combine_params(): combined:{_format_args(combined_params)}\\\")\\n\",\n    \"\\n\",\n    \"    docs.meta = [\\n\",\n    \"        x for x in docs.meta if not isinstance(x, docstring_parser.DocstringParam)\\n\",\n    \"    ] + combined_params  # type: ignore\\n\",\n    \"\\n\",\n    \"    f.__doc__ = docstring_parser.compose(\\n\",\n    \"        docs, style=docstring_parser.DocstringStyle.GOOGLE\\n\",\n    \"    )\\n\",\n    \"    return f\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"c76e597d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def f2(a: int = 0, b: str = \\\"nada\\\"):\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Args:\\n\",\n    \"        a: parameter a\\n\",\n    \"        b: parameter bbbb\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def f1(b: str, c: int, **kwargs):\\n\",\n    \"    \\\"\\\"\\\"Function f1\\n\",\n    \"    Args:\\n\",\n    \"        b: parameter b\\n\",\n    \"        c: parameter c\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: sometimes\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"combine_params(f1, f2).__doc__\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"Function f1\\n\",\n    \"Args:\\n\",\n    \"    b: parameter b\\n\",\n    \"    c: parameter c\\n\",\n    \"    a: parameter a\\n\",\n    \"\\n\",\n    \"Raises:\\n\",\n    \"    ValueError: sometimes\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"assert f1.__doc__ == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e98db95a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Add test case to test combine_params with 'but' param\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def f2(a: int = 0, b: str = \\\"nada\\\", d: str = \\\"dada\\\"):\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Args:\\n\",\n    \"        a: parameter a\\n\",\n    \"        b: parameter bbbb\\n\",\n    \"        d: parameter d\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def f1(b: str, c: int):\\n\",\n    \"    \\\"\\\"\\\"Function f1\\n\",\n    \"    Args:\\n\",\n    \"        b: parameter b\\n\",\n    \"        c: parameter c\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: sometimes\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"combine_params(f1, f2, but=[\\\"d\\\"]).__doc__\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"Function f1\\n\",\n 
   \"Args:\\n\",\n    \"    b: parameter b\\n\",\n    \"    c: parameter c\\n\",\n    \"    a: parameter a\\n\",\n    \"\\n\",\n    \"Raises:\\n\",\n    \"    ValueError: sometimes\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"assert f1.__doc__ == expected, f1.__doc__\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a06f975f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def delegates(\\n\",\n    \"    o: Union[Type, Callable[..., Any]],\\n\",\n    \"    keep: bool = False,\\n\",\n    \"    but: Optional[List[str]] = None,\\n\",\n    \") -> Callable[[F], F]:\\n\",\n    \"    \\\"\\\"\\\"Delegates keyword agruments from o to the function the decorator is applied to\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        o: object (class or function) with default kwargs\\n\",\n    \"        keep: Keep `kwargs` in decorated function?\\n\",\n    \"        but: argument names not to include\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def _inner(f: F, keep: bool = keep, but: Optional[List[str]] = but) -> F:\\n\",\n    \"        def _combine_params(\\n\",\n    \"            o: Union[Type, Callable[..., Any]], but: Optional[List[str]] = None\\n\",\n    \"        ) -> Callable[[F], F]:\\n\",\n    \"            def __combine_params(\\n\",\n    \"                f: F,\\n\",\n    \"                o: Union[Type, Callable[..., Any]] = o,\\n\",\n    \"                but: Optional[List[str]] = but,\\n\",\n    \"            ) -> F:\\n\",\n    \"                return combine_params(f=f, o=o, but=but)\\n\",\n    \"\\n\",\n    \"            return __combine_params\\n\",\n    \"\\n\",\n    \"        @_combine_params(o, but=but)  # type: ignore\\n\",\n    \"        @_delegates_without_docs(o, keep=keep, but=but)  # type: ignore\\n\",\n    \"        @wraps(f)\\n\",\n    \"        def _f(*args: Any, **kwargs: Any) -> Any:\\n\",\n    \"            return 
f(*args, **kwargs)\\n\",\n    \"\\n\",\n    \"        return _f\\n\",\n    \"\\n\",\n    \"    return _inner\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"827294c5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def f2(a: str = \\\"whatever\\\", d: int = 42) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Args:\\n\",\n    \"        a: parameter a\\n\",\n    \"        b: parameter bbbb\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(f2)\\n\",\n    \"def f1(b: str, c: int, **kwargs):\\n\",\n    \"    \\\"\\\"\\\"Function f1\\n\",\n    \"    Args:\\n\",\n    \"        b: parameter b\\n\",\n    \"        c: parameter c\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: sometimes\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"Function f1\\n\",\n    \"Args:\\n\",\n    \"    b: parameter b\\n\",\n    \"    c: parameter c\\n\",\n    \"    a: parameter a\\n\",\n    \"\\n\",\n    \"Raises:\\n\",\n    \"    ValueError: sometimes\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"assert f1.__doc__ == expected\\n\",\n    \"assert len(inspect.signature(f2).parameters) == 2\\n\",\n    \"assert len(inspect.signature(f1).parameters) == 4\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4443b59d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Add test case to test delegates with 'but' param\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def p1(a: int = 0, b: float = 0.1):\\n\",\n    \"    \\\"\\\"\\\"Func p1\\n\",\n    \"    Args:\\n\",\n    \"        a: hello\\n\",\n    \"        b: bello\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(p1, but=[\\\"b\\\"])\\n\",\n    \"def p2(c: int, d: float):\\n\",\n    \"    \\\"\\\"\\\"Func p2\\n\",\n    \"    
Args:\\n\",\n    \"        c: cello\\n\",\n    \"        d: dello\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"expected = \\\"\\\"\\\"Func p2\\n\",\n    \"Args:\\n\",\n    \"    c: cello\\n\",\n    \"    d: dello\\n\",\n    \"    a: hello\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"assert p2.__doc__ == expected, p2.__doc__\\n\",\n    \"assert len(inspect.signature(p2).parameters) == 3\\n\",\n    \"assert len(inspect.signature(p1).parameters) == 2\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"30a06d70\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"mappingproxy({'b': <Parameter \\\"b: str\\\">,\\n\",\n       \"              'c': <Parameter \\\"c: int\\\">,\\n\",\n       \"              'a': <Parameter \\\"a: str = 'whatever'\\\">,\\n\",\n       \"              'd': <Parameter \\\"d: int = 42\\\">})\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"@delegates(f2)\\n\",\n    \"def f3(b: str, c: int, **kwargs):\\n\",\n    \"    \\\"\\\"\\\"Function f1\\n\",\n    \"    Args:\\n\",\n    \"        b: parameter b\\n\",\n    \"        c: parameter c\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: sometimes\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"params = inspect.signature(f3).parameters\\n\",\n    \"display(params)\\n\",\n    \"assert len(params) == 4\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"31ea7197\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"function a\\n\",\n      \"\\n\",\n      \"Args:\\n\",\n      \"    a: parameter a\\n\",\n      \"    *topics (list(str)): optional list of topics to subscribe to. 
If not set,\\n\",\n      \"        call :meth:`.subscribe` or :meth:`.assign` before consuming records.\\n\",\n      \"        Passing topics directly is same as calling :meth:`.subscribe` API.\\n\",\n      \"    bootstrap_servers (str, list(str)): a ``host[:port]`` string (or list of\\n\",\n      \"        ``host[:port]`` strings) that the consumer should contact to bootstrap\\n\",\n      \"        initial cluster metadata.\\n\",\n      \"        \\n\",\n      \"        This does not have to be the full node list.\\n\",\n      \"        It just needs to have at least one broker that will respond to a\\n\",\n      \"        Metadata API Request. Default port is 9092. If no servers are\\n\",\n      \"        specified, will default to ``localhost:9092``.\\n\",\n      \"    client_id (str): a name for this client. This string is passed in\\n\",\n      \"        each request to servers and can be used to identify specific\\n\",\n      \"        server-side log entries that correspond to this client. Also\\n\",\n      \"        submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\\n\",\n      \"        for logging with respect to consumer group administration. Default:\\n\",\n      \"        ``aiokafka-{version}``\\n\",\n      \"    group_id (str or None): name of the consumer group to join for dynamic\\n\",\n      \"        partition assignment (if enabled), and to use for fetching and\\n\",\n      \"        committing offsets. 
If None, auto-partition assignment (via\\n\",\n      \"        group coordinator) and offset commits are disabled.\\n\",\n      \"        Default: None\\n\",\n      \"    key_deserializer (Callable): Any callable that takes a\\n\",\n      \"        raw message key and returns a deserialized key.\\n\",\n      \"    value_deserializer (Callable, Optional): Any callable that takes a\\n\",\n      \"        raw message value and returns a deserialized value.\\n\",\n      \"    fetch_min_bytes (int): Minimum amount of data the server should\\n\",\n      \"        return for a fetch request, otherwise wait up to\\n\",\n      \"        `fetch_max_wait_ms` for more data to accumulate. Default: 1.\\n\",\n      \"    fetch_max_bytes (int): The maximum amount of data the server should\\n\",\n      \"        return for a fetch request. This is not an absolute maximum, if\\n\",\n      \"        the first message in the first non-empty partition of the fetch\\n\",\n      \"        is larger than this value, the message will still be returned\\n\",\n      \"        to ensure that the consumer can make progress. NOTE: consumer\\n\",\n      \"        performs fetches to multiple brokers in parallel so memory\\n\",\n      \"        usage will depend on the number of brokers containing\\n\",\n      \"        partitions for the topic.\\n\",\n      \"        Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\\n\",\n      \"    fetch_max_wait_ms (int): The maximum amount of time in milliseconds\\n\",\n      \"        the server will block before answering the fetch request if\\n\",\n      \"        there isn't sufficient data to immediately satisfy the\\n\",\n      \"        requirement given by fetch_min_bytes. Default: 500.\\n\",\n      \"    max_partition_fetch_bytes (int): The maximum amount of data\\n\",\n      \"        per-partition the server will return. 
The maximum total memory\\n\",\n      \"        used for a request ``= #partitions * max_partition_fetch_bytes``.\\n\",\n      \"        This size must be at least as large as the maximum message size\\n\",\n      \"        the server allows or else it is possible for the producer to\\n\",\n      \"        send messages larger than the consumer can fetch. If that\\n\",\n      \"        happens, the consumer can get stuck trying to fetch a large\\n\",\n      \"        message on a certain partition. Default: 1048576.\\n\",\n      \"    max_poll_records (int): The maximum number of records returned in a\\n\",\n      \"        single call to :meth:`.getmany`. Defaults ``None``, no limit.\\n\",\n      \"    request_timeout_ms (int): Client request timeout in milliseconds.\\n\",\n      \"        Default: 40000.\\n\",\n      \"    retry_backoff_ms (int): Milliseconds to backoff when retrying on\\n\",\n      \"        errors. Default: 100.\\n\",\n      \"    auto_offset_reset (str): A policy for resetting offsets on\\n\",\n      \"        :exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\\n\",\n      \"        available message, ``latest`` will move to the most recent, and\\n\",\n      \"        ``none`` will raise an exception so you can handle this case.\\n\",\n      \"        Default: ``latest``.\\n\",\n      \"    enable_auto_commit (bool): If true the consumer's offset will be\\n\",\n      \"        periodically committed in the background. Default: True.\\n\",\n      \"    auto_commit_interval_ms (int): milliseconds between automatic\\n\",\n      \"        offset commits, if enable_auto_commit is True. Default: 5000.\\n\",\n      \"    check_crcs (bool): Automatically check the CRC32 of the records\\n\",\n      \"        consumed. This ensures no on-the-wire or on-disk corruption to\\n\",\n      \"        the messages occurred. This check adds some overhead, so it may\\n\",\n      \"        be disabled in cases seeking extreme performance. 
Default: True\\n\",\n      \"    metadata_max_age_ms (int): The period of time in milliseconds after\\n\",\n      \"        which we force a refresh of metadata even if we haven't seen any\\n\",\n      \"        partition leadership changes to proactively discover any new\\n\",\n      \"        brokers or partitions. Default: 300000\\n\",\n      \"    partition_assignment_strategy (list): List of objects to use to\\n\",\n      \"        distribute partition ownership amongst consumer instances when\\n\",\n      \"        group management is used. This preference is implicit in the order\\n\",\n      \"        of the strategies in the list. When assignment strategy changes:\\n\",\n      \"        to support a change to the assignment strategy, new versions must\\n\",\n      \"        enable support both for the old assignment strategy and the new\\n\",\n      \"        one. The coordinator will choose the old assignment strategy until\\n\",\n      \"        all members have been updated. Then it will choose the new\\n\",\n      \"        strategy. Default: [:class:`.RoundRobinPartitionAssignor`]\\n\",\n      \"    max_poll_interval_ms (int): Maximum allowed time between calls to\\n\",\n      \"        consume messages (e.g., :meth:`.getmany`). If this interval\\n\",\n      \"        is exceeded the consumer is considered failed and the group will\\n\",\n      \"        rebalance in order to reassign the partitions to another consumer\\n\",\n      \"        group member. If API methods block waiting for messages, that time\\n\",\n      \"        does not count against this timeout. See `KIP-62`_ for more\\n\",\n      \"        information. Default 300000\\n\",\n      \"    rebalance_timeout_ms (int): The maximum time server will wait for this\\n\",\n      \"        consumer to rejoin the group in a case of rebalance. 
In Java client\\n\",\n      \"        this behaviour is bound to `max.poll.interval.ms` configuration,\\n\",\n      \"        but as ``aiokafka`` will rejoin the group in the background, we\\n\",\n      \"        decouple this setting to allow finer tuning by users that use\\n\",\n      \"        :class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\\n\",\n      \"        to ``session_timeout_ms``\\n\",\n      \"    session_timeout_ms (int): Client group session and failure detection\\n\",\n      \"        timeout. The consumer sends periodic heartbeats\\n\",\n      \"        (`heartbeat.interval.ms`) to indicate its liveness to the broker.\\n\",\n      \"        If no hearts are received by the broker for a group member within\\n\",\n      \"        the session timeout, the broker will remove the consumer from the\\n\",\n      \"        group and trigger a rebalance. The allowed range is configured with\\n\",\n      \"        the **broker** configuration properties\\n\",\n      \"        `group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\\n\",\n      \"        Default: 10000\\n\",\n      \"    heartbeat_interval_ms (int): The expected time in milliseconds\\n\",\n      \"        between heartbeats to the consumer coordinator when using\\n\",\n      \"        Kafka's group management feature. Heartbeats are used to ensure\\n\",\n      \"        that the consumer's session stays active and to facilitate\\n\",\n      \"        rebalancing when new consumers join or leave the group. The\\n\",\n      \"        value must be set lower than `session_timeout_ms`, but typically\\n\",\n      \"        should be set no higher than 1/3 of that value. It can be\\n\",\n      \"        adjusted even lower to control the expected time for normal\\n\",\n      \"        rebalances. Default: 3000\\n\",\n      \"    consumer_timeout_ms (int): maximum wait timeout for background fetching\\n\",\n      \"        routine. 
Mostly defines how fast the system will see rebalance and\\n\",\n      \"        request new data for new partitions. Default: 200\\n\",\n      \"    api_version (str): specify which kafka API version to use.\\n\",\n      \"        :class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\\n\",\n      \"        If set to ``auto``, will attempt to infer the broker version by\\n\",\n      \"        probing various APIs. Default: ``auto``\\n\",\n      \"    security_protocol (str): Protocol used to communicate with brokers.\\n\",\n      \"        Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\\n\",\n      \"    ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\\n\",\n      \"        for wrapping socket connections. Directly passed into asyncio's\\n\",\n      \"        :meth:`~asyncio.loop.create_connection`. For more information see\\n\",\n      \"        :ref:`ssl_auth`. Default: None.\\n\",\n      \"    exclude_internal_topics (bool): Whether records from internal topics\\n\",\n      \"        (such as offsets) should be exposed to the consumer. If set to True\\n\",\n      \"        the only way to receive records from an internal topic is\\n\",\n      \"        subscribing to it. Requires 0.10+ Default: True\\n\",\n      \"    connections_max_idle_ms (int): Close idle connections after the number\\n\",\n      \"        of milliseconds specified by this config. Specifying `None` will\\n\",\n      \"        disable idle checks. 
Default: 540000 (9 minutes).\\n\",\n      \"    isolation_level (str): Controls how to read messages written\\n\",\n      \"        transactionally.\\n\",\n      \"        \\n\",\n      \"        If set to ``read_committed``, :meth:`.getmany` will only return\\n\",\n      \"        transactional messages which have been committed.\\n\",\n      \"        If set to ``read_uncommitted`` (the default), :meth:`.getmany` will\\n\",\n      \"        return all messages, even transactional messages which have been\\n\",\n      \"        aborted.\\n\",\n      \"        \\n\",\n      \"        Non-transactional messages will be returned unconditionally in\\n\",\n      \"        either mode.\\n\",\n      \"        \\n\",\n      \"        Messages will always be returned in offset order. Hence, in\\n\",\n      \"        `read_committed` mode, :meth:`.getmany` will only return\\n\",\n      \"        messages up to the last stable offset (LSO), which is the one less\\n\",\n      \"        than the offset of the first open transaction. In particular any\\n\",\n      \"        messages appearing after messages belonging to ongoing transactions\\n\",\n      \"        will be withheld until the relevant transaction has been completed.\\n\",\n      \"        As a result, `read_committed` consumers will not be able to read up\\n\",\n      \"        to the high watermark when there are in flight transactions.\\n\",\n      \"        Further, when in `read_committed` the seek_to_end method will\\n\",\n      \"        return the LSO. See method docs below. Default: ``read_uncommitted``\\n\",\n      \"    sasl_mechanism (str): Authentication mechanism when security_protocol\\n\",\n      \"        is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. 
Valid values are:\\n\",\n      \"        ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\\n\",\n      \"        ``OAUTHBEARER``.\\n\",\n      \"        Default: ``PLAIN``\\n\",\n      \"    sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\\n\",\n      \"        Default: None\\n\",\n      \"    sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\\n\",\n      \"        Default: None\\n\",\n      \"    sasl_oauth_token_provider (~aiokafka.abc.AbstractTokenProvider): OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\\n\",\n      \"        Default: None\\n\",\n      \"\\n\",\n      \"Returns:\\n\",\n      \"    : things not stuff\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"@delegates(AIOKafkaConsumer)\\n\",\n    \"def f(a: int, **kwargs) -> str:\\n\",\n    \"    \\\"\\\"\\\"function a\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        a: parameter a\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        things not stuff\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    print(f\\\"{a=}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert len(f.__doc__) > 5000\\n\",\n    \"\\n\",\n    \"print(f.__doc__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a6b11e13\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"mappingproxy({'a': <Parameter \\\"a: int\\\">,\\n\",\n       \"              'loop': <Parameter \\\"loop=None\\\">,\\n\",\n       \"              'bootstrap_servers': <Parameter \\\"bootstrap_servers='localhost'\\\">,\\n\",\n       \"              'client_id': <Parameter \\\"client_id='aiokafka-0.8.0'\\\">,\\n\",\n       \"              'group_id': <Parameter \\\"group_id=None\\\">,\\n\",\n       \"              'key_deserializer': <Parameter \\\"key_deserializer=None\\\">,\\n\",\n       \"              'value_deserializer': <Parameter \\\"value_deserializer=None\\\">,\\n\",\n   
    \"              'fetch_max_wait_ms': <Parameter \\\"fetch_max_wait_ms=500\\\">,\\n\",\n       \"              'fetch_max_bytes': <Parameter \\\"fetch_max_bytes=52428800\\\">,\\n\",\n       \"              'fetch_min_bytes': <Parameter \\\"fetch_min_bytes=1\\\">,\\n\",\n       \"              'max_partition_fetch_bytes': <Parameter \\\"max_partition_fetch_bytes=1048576\\\">,\\n\",\n       \"              'request_timeout_ms': <Parameter \\\"request_timeout_ms=40000\\\">,\\n\",\n       \"              'retry_backoff_ms': <Parameter \\\"retry_backoff_ms=100\\\">,\\n\",\n       \"              'auto_offset_reset': <Parameter \\\"auto_offset_reset='latest'\\\">,\\n\",\n       \"              'enable_auto_commit': <Parameter \\\"enable_auto_commit=True\\\">,\\n\",\n       \"              'auto_commit_interval_ms': <Parameter \\\"auto_commit_interval_ms=5000\\\">,\\n\",\n       \"              'check_crcs': <Parameter \\\"check_crcs=True\\\">,\\n\",\n       \"              'metadata_max_age_ms': <Parameter \\\"metadata_max_age_ms=300000\\\">,\\n\",\n       \"              'partition_assignment_strategy': <Parameter \\\"partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)\\\">,\\n\",\n       \"              'max_poll_interval_ms': <Parameter \\\"max_poll_interval_ms=300000\\\">,\\n\",\n       \"              'rebalance_timeout_ms': <Parameter \\\"rebalance_timeout_ms=None\\\">,\\n\",\n       \"              'session_timeout_ms': <Parameter \\\"session_timeout_ms=10000\\\">,\\n\",\n       \"              'heartbeat_interval_ms': <Parameter \\\"heartbeat_interval_ms=3000\\\">,\\n\",\n       \"              'consumer_timeout_ms': <Parameter \\\"consumer_timeout_ms=200\\\">,\\n\",\n       \"              'max_poll_records': <Parameter \\\"max_poll_records=None\\\">,\\n\",\n       \"              'ssl_context': <Parameter \\\"ssl_context=None\\\">,\\n\",\n       \"              'security_protocol': <Parameter 
\\\"security_protocol='PLAINTEXT'\\\">,\\n\",\n       \"              'api_version': <Parameter \\\"api_version='auto'\\\">,\\n\",\n       \"              'exclude_internal_topics': <Parameter \\\"exclude_internal_topics=True\\\">,\\n\",\n       \"              'connections_max_idle_ms': <Parameter \\\"connections_max_idle_ms=540000\\\">,\\n\",\n       \"              'isolation_level': <Parameter \\\"isolation_level='read_uncommitted'\\\">,\\n\",\n       \"              'sasl_mechanism': <Parameter \\\"sasl_mechanism='PLAIN'\\\">,\\n\",\n       \"              'sasl_plain_password': <Parameter \\\"sasl_plain_password=None\\\">,\\n\",\n       \"              'sasl_plain_username': <Parameter \\\"sasl_plain_username=None\\\">,\\n\",\n       \"              'sasl_kerberos_service_name': <Parameter \\\"sasl_kerberos_service_name='kafka'\\\">,\\n\",\n       \"              'sasl_kerberos_domain_name': <Parameter \\\"sasl_kerberos_domain_name=None\\\">,\\n\",\n       \"              'sasl_oauth_token_provider': <Parameter \\\"sasl_oauth_token_provider=None\\\">})\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"inspect.signature(f).parameters\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d5f22fcf\",\n   \"metadata\": {},\n   \"source\": [\n    \"We can optionally decide to keep **kwargs by setting keep=True:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"71c2fef6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<Signature (c, a, *, b: int = 2, **kwargs)>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"@delegates(baz, keep=True)\\n\",\n    \"def foo(c, a, **kwargs):\\n\",\n    \"    return c + baz(a, **kwargs)\\n\",\n    \"\\n\",\n    
\"\\n\",\n    \"inspect.signature(foo)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"53cde9c4\",\n   \"metadata\": {},\n   \"source\": [\n    \"It is important to note that only parameters with default parameters are included. For example, in the below scenario only c, but NOT e and d are included in the signature of foo after applying delegates:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8ac19ca4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<Signature (a, b=1, *, c=2)>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def basefoo(e, d, c=2):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(basefoo)\\n\",\n    \"def foo(a, b=1, **kwargs):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"inspect.signature(\\n\",\n    \"    foo\\n\",\n    \")  # e and d are not included b/c they don't have default parameters.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"25b99291\",\n   \"metadata\": {},\n   \"source\": [\n    \"The reason that required arguments (i.e. those without default parameters) are automatically excluded is that you should be explicitly implementing required arguments into your function's signature rather than relying on delegates.\\n\",\n    \"\\n\",\n    \"Additionally, you can exclude specific parameters from being included in the signature with the but parameter. 
In the example below, we exclude the parameter d:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"90b04168\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<Signature (a, b=1, *, c=2)>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def basefoo(e, c=2, d=3):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(basefoo, but=[\\\"d\\\"])\\n\",\n    \"def foo(a, b=1, **kwargs):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"test_sig(foo, \\\"(a, b=1, *, c=2)\\\")\\n\",\n    \"inspect.signature(foo)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2961cb40\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can also use delegates between methods in a class. Here is an example of delegates with class methods:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5a23bc7b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# example 1: class methods\\n\",\n    \"class _T:\\n\",\n    \"    @classmethod\\n\",\n    \"    def foo(cls, a=1, b=2):\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    @classmethod\\n\",\n    \"    @delegates(foo)\\n\",\n    \"    def bar(cls, c=3, **kwargs):\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"test_sig(_T.bar, \\\"(c=3, *, a=1, b=2)\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1d2bb41e\",\n   \"metadata\": {},\n   \"source\": [\n    \"Here is the same example with instance methods:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"789a9cc3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# example 2: instance methods\\n\",\n    \"class _T:\\n\",\n    \"    def foo(self, a=1, b=2):\\n\",\n    \"        
pass\\n\",\n    \"\\n\",\n    \"    @delegates(foo)\\n\",\n    \"    def bar(self, c=3, **kwargs):\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"t = _T()\\n\",\n    \"test_sig(t.bar, \\\"(c=3, *, a=1, b=2)\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"97b9eb07\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can also delegate between classes. By default, the delegates decorator will delegate to the superclass:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3d397cd2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# class BaseFoo:\\n\",\n    \"#     def __init__(self, e, c=2): pass\\n\",\n    \"\\n\",\n    \"# @delegates()# since no argument was passsed here we delegate to the superclass\\n\",\n    \"# class Foo(BaseFoo):\\n\",\n    \"#     def __init__(self, a, b=1, **kwargs): super().__init__(**kwargs)\\n\",\n    \"\\n\",\n    \"# test_sig(Foo, '(a, b=1, *, c=2)')\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d1d1b68f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def use_parameters_of(\\n\",\n    \"    o: Union[Type, Callable[..., Any]], **kwargs: Dict[str, Any]\\n\",\n    \") -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"Restrict parameters passwed as keyword arguments to parameters from the signature of ``o``\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        o: object or callable which signature is used for restricting keyword arguments\\n\",\n    \"        kwargs: keyword arguments\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        restricted keyword arguments\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    allowed_keys = set(inspect.signature(o).parameters.keys())\\n\",\n    \"    return {k: v for k, v in kwargs.items() if k in allowed_keys}\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"code\",\n   \"execution_count\": null,\n   \"id\": \"1f329c93\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert use_parameters_of(AIOKafkaConsumer, api_version=0.1, radnom_param=\\\"random\\\") == {\\n\",\n    \"    \\\"api_version\\\": 0.1\\n\",\n    \"}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"25f1013a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def filter_using_signature(f: Callable, **kwargs: Dict[str, Any]) -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"todo: write docs\\\"\\\"\\\"\\n\",\n    \"    param_names = list(inspect.signature(f).parameters.keys())\\n\",\n    \"    return {k: v for k, v in kwargs.items() if k in param_names}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9051465c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def f(a: int, *, b: str):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert filter_using_signature(f, a=1, c=3) == {\\\"a\\\": 1}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1123f286\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"TorF = TypeVar(\\\"TorF\\\", Type, Callable[..., Any])\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def export(module_name: str) -> Callable[[TorF], TorF]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decorator that sets the __module__ attribute of the decorated object to the specified module name.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        module_name: Name of the module to set as __module__ attribute.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Decorator function that sets the __module__ attribute of the decorated object.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    def _inner(o: TorF, module_name: str = 
module_name) -> TorF:\\n\",\n    \"        o.__module__ = module_name\\n\",\n    \"        return o\\n\",\n    \"\\n\",\n    \"    return _inner\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a09e2c63\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@export(\\\"super.cool\\\")\\n\",\n    \"class A:\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert A.__module__ == \\\"super.cool\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b7ca0ffe\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Class context manager\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ce80a4fe\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"T = TypeVar(\\\"T\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def classcontextmanager(name: str = \\\"lifecycle\\\") -> Callable[[Type[T]], Type[T]]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Decorator that adds context manager functionality to a class.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: Name of the context manager attribute in the class. 
Default is \\\"lifecycle\\\".\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Decorator function that adds context manager functionality to the class.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    def _classcontextmanager(cls: Type[T], name: str = name) -> Type[T]:\\n\",\n    \"        if not hasattr(cls, name):\\n\",\n    \"            raise ValueError\\n\",\n    \"\\n\",\n    \"        @patch\\n\",\n    \"        def __enter__(self: cls) -> Any:  # type: ignore\\n\",\n    \"            if not hasattr(self, \\\"_lifecycle_ctx\\\"):\\n\",\n    \"                self._lifecycle_ctx = []  # type: ignore\\n\",\n    \"\\n\",\n    \"            self._lifecycle_ctx.append(getattr(self, name)())  # type: ignore\\n\",\n    \"            return self._lifecycle_ctx[-1].__enter__()  # type: ignore\\n\",\n    \"\\n\",\n    \"        @patch\\n\",\n    \"        def __exit__(self: cls, *args: Any) -> None:  # type: ignore\\n\",\n    \"            self._lifecycle_ctx.pop(-1).__exit__(*args)  # type: ignore\\n\",\n    \"\\n\",\n    \"        return cls\\n\",\n    \"\\n\",\n    \"    return _classcontextmanager\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0e61bdb4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@classcontextmanager(\\\"lifecycle\\\")\\n\",\n    \"class A:\\n\",\n    \"    @contextmanager\\n\",\n    \"    def lifecycle(self):\\n\",\n    \"        try:\\n\",\n    \"            print(\\\"I'm in\\\")\\n\",\n    \"            yield\\n\",\n    \"        finally:\\n\",\n    \"            print(\\\"I'm out\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"42dbd707\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"I'm in\\n\",\n      \"I'm in\\n\",\n      \"I'm out\\n\",\n      \"I'm out\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"a = A()\\n\",\n 
   \"with a:\\n\",\n    \"    with a:\\n\",\n    \"        pass\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"80105190\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _get_default_kwargs_from_sig(f: F, **kwargs: Any) -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Get default values for function **kwargs\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        f: Function to extract default values from\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Dict of default values of function f **kwargs\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    defaults = {\\n\",\n    \"        k: v.default\\n\",\n    \"        for k, v in inspect.signature(f).parameters.items()\\n\",\n    \"        if v.default != inspect._empty\\n\",\n    \"    }\\n\",\n    \"    defaults.update(kwargs)\\n\",\n    \"    return defaults\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c9dbfd25\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def A(c: int, b: str = \\\"default\\\", d: float = 3.14):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"assert _get_default_kwargs_from_sig(A) == {\\\"b\\\": \\\"default\\\", \\\"d\\\": 3.14}\\n\",\n    \"assert _get_default_kwargs_from_sig(A, d=5.15) == {\\\"b\\\": \\\"default\\\", \\\"d\\\": 5.15}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"54081356\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/097_Docs_Dependencies.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c5d84ff9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.docs_dependencies\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b4b0e6c8\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Install docs dependencies\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"16950f9d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import os\\n\",\n    \"import platform\\n\",\n    \"import shutil\\n\",\n    \"import subprocess  # nosec Issue: [B404:blacklist]\\n\",\n    \"import tarfile\\n\",\n    \"import zipfile\\n\",\n    \"from pathlib import Path\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import in_notebook\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"\\n\",\n    \"if in_notebook():\\n\",\n    \"    from tqdm.notebook import tqdm\\n\",\n    \"else:\\n\",\n    \"    from tqdm import tqdm\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2f57c397\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from contextlib import contextmanager\\n\",\n    \"\\n\",\n    \"import pytest\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"991a4c6e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7637df53\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"npm_required_major_version = 9\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def 
_check_npm(required_major_version: int = npm_required_major_version) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Check if npm is installed and its major version is compatible with the required version.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        required_major_version: Required major version of npm. Defaults to 9.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If npm is not found or its major version is lower than the required version.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if shutil.which(\\\"npm\\\") is not None:\\n\",\n    \"        cmd = \\\"npm --version\\\"\\n\",\n    \"        proc = subprocess.run(  # nosec [B602:subprocess_popen_with_shell_equals_true]\\n\",\n    \"            cmd,\\n\",\n    \"            shell=True,\\n\",\n    \"            check=True,\\n\",\n    \"            capture_output=True,\\n\",\n    \"        )\\n\",\n    \"        major_version = int(proc.stdout.decode(\\\"UTF-8\\\").split(\\\".\\\")[0])\\n\",\n    \"        if major_version < required_major_version:\\n\",\n    \"            raise RuntimeError(\\n\",\n    \"                f\\\"Found installed npm major version: {major_version}, required npx major version: {required_major_version}. 
To use documentation features of FastKafka, please update npm\\\"\\n\",\n    \"            )\\n\",\n    \"    else:\\n\",\n    \"        raise RuntimeError(\\n\",\n    \"            f\\\"npm not found, to use documentation generation features of FastKafka, you must have npm >= {required_major_version} installed\\\"\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fe7da20a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@contextmanager\\n\",\n    \"def _clean_path():\\n\",\n    \"    path = os.environ[\\\"PATH\\\"]\\n\",\n    \"    try:\\n\",\n    \"        os.environ[\\\"PATH\\\"] = \\\"\\\"\\n\",\n    \"        yield\\n\",\n    \"    finally:\\n\",\n    \"        os.environ[\\\"PATH\\\"] = path\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f37a6e80\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"_check_npm()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"363f428b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with _clean_path():\\n\",\n    \"    with pytest.raises(RuntimeError) as e:\\n\",\n    \"        await _check_npm()\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    e.value.args[0]\\n\",\n    \"    == \\\"npm not found, to use documentation generation features of FastKafka, you must have npm >= 9 installed\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"996288e9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with pytest.raises(RuntimeError) as e:\\n\",\n    \"    await _check_npm(required_major_version=999)\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    e.value.args[0]\\n\",\n    \"    == \\\"Found installed npm major version: 9, required npx major version: 999. 
To use documentation features of FastKafka, please update npm\\\"\\n\",\n    \"), e.value.args[0]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a015d2c2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"node_version = \\\"v18.15.0\\\"\\n\",\n    \"node_fname_suffix = \\\"win-x64\\\" if platform.system() == \\\"Windows\\\" else \\\"linux-x64\\\"\\n\",\n    \"node_fname = f\\\"node-{node_version}-{node_fname_suffix}\\\"\\n\",\n    \"node_fname_extension = \\\".zip\\\" if platform.system() == \\\"Windows\\\" else \\\".tar.xz\\\"\\n\",\n    \"node_url = f\\\"https://nodejs.org/dist/{node_version}/{node_fname}{node_fname_extension}\\\"\\n\",\n    \"local_path = (\\n\",\n    \"    Path(os.path.expanduser(\\\"~\\\")).parent / \\\"Public\\\"\\n\",\n    \"    if platform.system() == \\\"Windows\\\"\\n\",\n    \"    else Path(os.path.expanduser(\\\"~\\\")) / \\\".local\\\"\\n\",\n    \")\\n\",\n    \"tgz_path = local_path / f\\\"{node_fname}{node_fname_extension}\\\"\\n\",\n    \"node_path = local_path / f\\\"{node_fname}\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _check_npm_with_local(node_path: Path = node_path) -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Check if npm is installed and its major version is compatible with the required version.\\n\",\n    \"    If npm is not found but a local installation of NodeJS is available, add the NodeJS binary path to the system's PATH environment variable.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        node_path: Path to the local installation of NodeJS. 
Defaults to node_path.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If npm is not found and a local installation of NodeJS is not available.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        _check_npm()\\n\",\n    \"    except RuntimeError as e:\\n\",\n    \"        if (node_path).exists():\\n\",\n    \"            logger.info(\\\"Found local installation of NodeJS.\\\")\\n\",\n    \"            node_binary_path = (\\n\",\n    \"                f\\\";{node_path}\\\"\\n\",\n    \"                if platform.system() == \\\"Windows\\\"\\n\",\n    \"                else f\\\":{node_path / 'bin'}\\\"\\n\",\n    \"            )\\n\",\n    \"            os.environ[\\\"PATH\\\"] = os.environ[\\\"PATH\\\"] + node_binary_path\\n\",\n    \"            _check_npm()\\n\",\n    \"        else:\\n\",\n    \"            raise e\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1d8c5b65\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"_check_npm_with_local()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"86b77faf\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# Not reentrant because when local is installed, it will break\\n\",\n    \"\\n\",\n    \"with _clean_path():\\n\",\n    \"    with pytest.raises(RuntimeError) as e:\\n\",\n    \"        _check_npm_with_local()\\n\",\n    \"\\n\",\n    \"assert (\\n\",\n    \"    e.value.args[0]\\n\",\n    \"    == \\\"npm not found, to use documentation generation features of FastKafka, you must have npm >= 9 installed\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8e6f1782\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _install_node(\\n\",\n    \"    *,\\n\",\n    \"    node_url: str = 
node_url,\\n\",\n    \"    local_path: Path = local_path,\\n\",\n    \"    tgz_path: Path = tgz_path,\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Install NodeJS by downloading the NodeJS distribution archive, extracting it, and adding the NodeJS binary path to the system's PATH environment variable.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        node_url: URL of the NodeJS distribution archive to download. Defaults to node_url.\\n\",\n    \"        local_path: Path to store the downloaded distribution archive. Defaults to local_path.\\n\",\n    \"        tgz_path: Path of the downloaded distribution archive. Defaults to tgz_path.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        import requests\\n\",\n    \"    except Exception as e:\\n\",\n    \"        msg = \\\"Please install docs version of fastkafka using 'pip install fastkafka[docs]' command\\\"\\n\",\n    \"        logger.error(msg)\\n\",\n    \"        raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"    logger.info(\\\"Installing NodeJS...\\\")\\n\",\n    \"    local_path.mkdir(exist_ok=True, parents=True)\\n\",\n    \"    response = requests.get(\\n\",\n    \"        node_url,\\n\",\n    \"        stream=True,\\n\",\n    \"        timeout=60,\\n\",\n    \"    )\\n\",\n    \"    try:\\n\",\n    \"        total = response.raw.length_remaining // 128\\n\",\n    \"    except Exception:\\n\",\n    \"        total = None\\n\",\n    \"\\n\",\n    \"    with open(tgz_path, \\\"wb\\\") as f:\\n\",\n    \"        for data in tqdm(response.iter_content(chunk_size=128), total=total):\\n\",\n    \"            f.write(data)\\n\",\n    \"\\n\",\n    \"    if platform.system() == \\\"Windows\\\":\\n\",\n    \"        with zipfile.ZipFile(tgz_path, \\\"r\\\") as zip_ref:\\n\",\n    \"            zip_ref.extractall(\\n\",\n    \"                local_path\\n\",\n    \"            )  # nosec: B202 tarfile_unsafe_members - tarfile.extractall used without any 
validation. Please check and discard dangerous members.\\n\",\n    \"    else:\\n\",\n    \"        with tarfile.open(tgz_path) as tar:\\n\",\n    \"            for tarinfo in tar:\\n\",\n    \"                tar.extract(tarinfo, local_path)\\n\",\n    \"\\n\",\n    \"    os.environ[\\\"PATH\\\"] = (\\n\",\n    \"        os.environ[\\\"PATH\\\"] + f\\\";{node_path}\\\"\\n\",\n    \"        if platform.system() == \\\"Windows\\\"\\n\",\n    \"        else f\\\":{node_path}/bin\\\"\\n\",\n    \"    )\\n\",\n    \"    logger.info(f\\\"Node installed in {node_path}.\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"55a078fe\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: Installing NodeJS...\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"5a68aefa6df6433ea191b07606462cca\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"  0%|          | 0/184668 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: Node installed in /home/davor/.local/node-v18.15.0-linux-x64.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# Breaks because other tests running in parallel are using already installed node\\n\",\n    \"\\n\",\n    \"_install_node()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9595e0bb\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: Found local installation of NodeJS.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n 
   \"\\n\",\n    \"with _clean_path():\\n\",\n    \"    _check_npm_with_local()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"51056070\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def _install_docs_npm_deps() -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Install the required npm dependencies for generating the documentation using AsyncAPI generator.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    with TemporaryDirectory() as d:\\n\",\n    \"        cmd = (\\n\",\n    \"            \\\"npx -y -p @asyncapi/generator ag https://raw.githubusercontent.com/asyncapi/asyncapi/master/examples/simple.yml @asyncapi/html-template -o \\\"\\n\",\n    \"            + d\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        proc = await asyncio.create_subprocess_shell(\\n\",\n    \"            cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE\\n\",\n    \"        )\\n\",\n    \"        stdout, stderr = await proc.communicate()\\n\",\n    \"\\n\",\n    \"        if proc.returncode == 0:\\n\",\n    \"            logger.info(\\\"AsyncAPI generator installed\\\")\\n\",\n    \"        else:\\n\",\n    \"            logger.error(\\\"AsyncAPI generator NOT installed!\\\")\\n\",\n    \"            logger.info(\\n\",\n    \"                f\\\"stdout of '$ {cmd}'{stdout.decode('UTF-8')} \\\\n return_code={proc.returncode}\\\"\\n\",\n    \"            )\\n\",\n    \"            logger.info(\\n\",\n    \"                f\\\"stderr of '$ {cmd}'{stderr.decode('UTF-8')} \\\\n return_code={proc.returncode}\\\"\\n\",\n    \"            )\\n\",\n    \"            raise ValueError(\\n\",\n    \"                f\\\"\\\"\\\"AsyncAPI generator NOT installed, used '$ {cmd}'\\n\",\n    \"----------------------------------------\\n\",\n    \"stdout:\\n\",\n    \"{stdout.decode(\\\"UTF-8\\\")}\\n\",\n    
\"----------------------------------------\\n\",\n    \"stderr:\\n\",\n    \"{stderr.decode(\\\"UTF-8\\\")}\\n\",\n    \"----------------------------------------\\n\",\n    \"return_code={proc.returncode}\\\"\\\"\\\"\\n\",\n    \"            )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2a498aa8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: AsyncAPI generator installed\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# Breaks because other tests running in parallel are using already installed node\\n\",\n    \"\\n\",\n    \"await _install_docs_npm_deps()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7aae8f4f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/098_Test_Dependencies.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7427cbb3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.test_dependencies\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"5ea7e8b8\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Install Test Dependencies\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f96418b5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"import re\\n\",\n    \"import platform\\n\",\n    \"import shutil\\n\",\n    \"import tarfile\\n\",\n    \"from contextlib import contextmanager\\n\",\n    \"from html.parser import HTMLParser\\n\",\n    \"from os import environ, rename\\n\",\n    \"from os.path import expanduser\\n\",\n    \"from pathlib import Path\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from packaging import version\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import change_dir, in_notebook\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"\\n\",\n    \"if in_notebook():\\n\",\n    \"    from tqdm.notebook import tqdm\\n\",\n    \"else:\\n\",\n    \"    from tqdm import tqdm\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"19b144a5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from fastkafka._components.logger import suppress_timestamps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"68b5eebe\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cd7f23f5\",\n   \"metadata\": {},\n   
\"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d3337f8e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def check_java(*, potential_jdk_path: Optional[List[Path]] = None) -> bool:\\n\",\n    \"    \\\"\\\"\\\"Checks if JDK 11 is installed on the machine and exports it to PATH if necessary.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        potential_jdk_path: Optional. List of potential paths where JDK 11 may be installed.\\n\",\n    \"                            If not provided, it defaults to searching for JDK 11 in the user's home directory.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        bool: True if JDK 11 is installed and exported to PATH, False otherwise.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if potential_jdk_path is None:\\n\",\n    \"        potential_jdk_path = list(Path(expanduser(\\\"~\\\") + \\\"/.jdk\\\").glob(\\\"jdk-11*\\\"))\\n\",\n    \"\\n\",\n    \"    if potential_jdk_path != []:\\n\",\n    \"        logger.info(\\\"Java is already installed.\\\")\\n\",\n    \"        if not shutil.which(\\\"java\\\"):\\n\",\n    \"            logger.info(\\\"But not exported to PATH, exporting...\\\")\\n\",\n    \"            env_path_separator = \\\";\\\" if platform.system() == \\\"Windows\\\" else \\\":\\\"\\n\",\n    \"            environ[\\\"PATH\\\"] = (\\n\",\n    \"                environ[\\\"PATH\\\"] + f\\\"{env_path_separator}{potential_jdk_path[0]/ 'bin'}\\\"\\n\",\n    \"            )\\n\",\n    \"        return True\\n\",\n    \"    return False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"1d8daf9f\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: Java is already installed.\\n\",\n      \"[INFO] __main__: But not exported to PATH, exporting...\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"assert check_java()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"374f061d\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _install_java() -> None:\\n\",\n    \"    \\\"\\\"\\\"Checks if jdk-11 is installed on the machine and installs it if not\\n\",\n    \"    \\n\",\n    \"    Returns:\\n\",\n    \"       None\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If JDK 11 installation fails.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        import jdk\\n\",\n    \"    except Exception as e:\\n\",\n    \"        msg = \\\"Please install test version of fastkafka using 'pip install fastkafka[test]' command\\\"\\n\",\n    \"        logger.error(msg)\\n\",\n    \"        raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"    if not check_java():\\n\",\n    \"        logger.info(\\\"Installing Java...\\\")\\n\",\n    \"        logger.info(\\\" - installing jdk...\\\")\\n\",\n    \"        jdk_bin_path = Path(jdk.install(\\\"11\\\"))\\n\",\n    \"        logger.info(f\\\" - jdk path: {jdk_bin_path}\\\")\\n\",\n    \"        env_path_separator = \\\";\\\" if platform.system() == \\\"Windows\\\" else \\\":\\\"\\n\",\n    \"        environ[\\\"PATH\\\"] = (\\n\",\n    \"            environ[\\\"PATH\\\"] + f\\\"{env_path_separator}{jdk_bin_path / 'bin'}\\\"\\n\",\n    \"        )\\n\",\n    \"        logger.info(\\\"Java installed.\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"69a5925a\",\n   
\"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: Java is already installed.\\n\",\n      \"[INFO] __main__: Java is already installed.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"_install_java()\\n\",\n    \"assert shutil.which(\\\"java\\\")\\n\",\n    \"_install_java()\\n\",\n    \"assert shutil.which(\\\"java\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c3da8781\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class VersionParser(HTMLParser):\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    A parser class for extracting the newest version of a software from HTML data.\\n\",\n    \"\\n\",\n    \"    This class extends the HTMLParser class and provides a mechanism to extract the newest version of a software\\n\",\n    \"    from HTML data using regular expressions.\\n\",\n    \"\\n\",\n    \"    Attributes:\\n\",\n    \"        newest_version (str): The newest version of the software.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    def __init__(self) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Initializes a VersionParser object.\\n\",\n    \"\\n\",\n    \"        The newest_version attribute is initialized to \\\"0.0.0\\\".\\n\",\n    \"\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        HTMLParser.__init__(self)\\n\",\n    \"        self.newest_version = \\\"0.0.0\\\"\\n\",\n    \"\\n\",\n    \"    def handle_data(self, data: str) -> None:\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        Handles the data encountered in the HTML parsing process.\\n\",\n    \"\\n\",\n    \"        This method is called by the HTMLParser base class when data is encountered within HTML tags.\\n\",\n    \"        It uses regular expressions to search for version 
numbers in the data and updates the newest_version\\n\",\n    \"        attribute if a higher version is found.\\n\",\n    \"\\n\",\n    \"        Args:\\n\",\n    \"            data (str): The data encountered during parsing.\\n\",\n    \"\\n\",\n    \"        \\\"\\\"\\\"\\n\",\n    \"        match = re.search(\\\"[0-9]+\\\\.[0-9]+\\\\.[0-9]+\\\", data)\\n\",\n    \"        if match is not None:\\n\",\n    \"            if version.parse(self.newest_version) < version.parse(match.group(0)):\\n\",\n    \"                self.newest_version = match.group(0)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"378d6d41\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"html = \\\"\\\"\\\"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 3.2 Final//EN\\\">\\n\",\n    \"<html>\\n\",\n    \" <head>\\n\",\n    \"  <title>Index of /kafka</title>\\n\",\n    \" </head>\\n\",\n    \" <body>\\n\",\n    \"<h1>Index of /kafka</h1>\\n\",\n    \"<pre><img src=\\\"/icons/blank.gif\\\" alt=\\\"Icon \\\"> <a href=\\\"?C=N;O=D\\\">Name</a>                    <a href=\\\"?C=M;O=A\\\">Last modified</a>      <a href=\\\"?C=S;O=A\\\">Size</a>  <a href=\\\"?C=D;O=A\\\">Description</a><hr><img src=\\\"/icons/back.gif\\\" alt=\\\"[PARENTDIR]\\\"> <a href=\\\"/\\\">Parent Directory</a>                             -   \\n\",\n    \"<img src=\\\"/icons/folder.gif\\\" alt=\\\"[DIR]\\\"> <a href=\\\"3.4.1/\\\">3.4.1/</a>                  2023-06-06 03:55    -   \\n\",\n    \"<img src=\\\"/icons/folder.gif\\\" alt=\\\"[DIR]\\\"> <a href=\\\"3.5.0/\\\">3.5.0/</a>                  2023-06-13 10:29    -   \\n\",\n    \"<img src=\\\"/icons/unknown.gif\\\" alt=\\\"[   ]\\\"> <a href=\\\"KEYS\\\">KEYS</a>                    2023-06-06 03:37  113K  \\n\",\n    \"<hr></pre>\\n\",\n    \"</body></html>\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"parser = VersionParser()\\n\",\n    \"parser.feed(html)\\n\",\n    \"assert parser.newest_version == 
\\\"3.5.0\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c22b5225\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"kafka_repo_url=\\\"https://dlcdn.apache.org/kafka\\\"\\n\",\n    \"\\n\",\n    \"def get_kafka_version(kafka_repo_url: str=kafka_repo_url) -> str:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Retrieves the newest version of Kafka from the given Kafka repository URL.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        kafka_repo_url: The URL of the Kafka repository. Defaults to `https://dlcdn.apache.org/kafka`.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The newest version of Kafka as a string.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If the requests module is not installed or encounters an error during the request.\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        import requests\\n\",\n    \"    except Exception as e:\\n\",\n    \"        msg = \\\"Please install test version of fastkafka using 'pip install fastkafka[test]' command\\\"\\n\",\n    \"        logger.error(msg)\\n\",\n    \"        raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"    parser = VersionParser()\\n\",\n    \"\\n\",\n    \"    response = requests.get(\\n\",\n    \"        kafka_repo_url,\\n\",\n    \"        timeout=60,\\n\",\n    \"    )\\n\",\n    \"    parser.feed(response.text)\\n\",\n    \"\\n\",\n    \"    return parser.newest_version\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"500f640f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"local_path = (\\n\",\n    \"    Path(expanduser(\\\"~\\\")).parent / \\\"Public\\\"\\n\",\n    \"    if platform.system() == \\\"Windows\\\"\\n\",\n    \"    else Path(expanduser(\\\"~\\\")) / 
\\\".local\\\"\\n\",\n    \") \"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a015d2c2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def check_kafka(local_path: Path = local_path) -> bool:\\n\",\n    \"    \\\"\\\"\\\"Checks if Kafka is installed on the machine and exports it to PATH if necessary.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        kafka_path: Path to the Kafka installation directory. Defaults to the global variable `kafka_path`.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        bool: True if Kafka is installed and exported to PATH, False otherwise.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    \\n\",\n    \"    kafka_fname = f\\\"kafka_2.13-{get_kafka_version()}\\\"\\n\",\n    \"    \\n\",\n    \"    kafka_path = (\\n\",\n    \"        local_path / \\\"kafka\\\"\\n\",\n    \"        if platform.system() == \\\"Windows\\\"\\n\",\n    \"        else local_path / f\\\"{kafka_fname}\\\"\\n\",\n    \"    )\\n\",\n    \"    \\n\",\n    \"    if (kafka_path / \\\"bin\\\").exists():\\n\",\n    \"        logger.info(\\\"Kafka is installed.\\\")\\n\",\n    \"        if not shutil.which(\\\"kafka-server-start.sh\\\"):\\n\",\n    \"            logger.info(\\\"But not exported to PATH, exporting...\\\")\\n\",\n    \"            kafka_binary_path = (\\n\",\n    \"                f\\\";{kafka_path / 'bin' / 'windows'}\\\"\\n\",\n    \"                if platform.system() == \\\"Windows\\\"\\n\",\n    \"                else f\\\":{kafka_path / 'bin'}\\\"\\n\",\n    \"            )\\n\",\n    \"            environ[\\\"PATH\\\"] = environ[\\\"PATH\\\"] + kafka_binary_path\\n\",\n    \"        return True\\n\",\n    \"    return False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9d1dcfa4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    
\"\\n\",\n    \"\\n\",\n    \"def _install_kafka(\\n\",\n    \"    local_path: Path = local_path, kafka_repo_url: str = kafka_repo_url\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"Checks if Kafka is installed on the machine and installs it if not.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        local_path: Path where the Kafka installation package will be stored. Defaults to the global variable `local_path`.\\n\",\n    \"        kafka_repo_url: The URL of the Kafka repository. Defaults to `https://dlcdn.apache.org/kafka`.\\n\",\n    \"    Returns:\\n\",\n    \"       None\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If Kafka installation fails.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        import requests\\n\",\n    \"    except Exception as e:\\n\",\n    \"        msg = \\\"Please install test version of fastkafka using 'pip install fastkafka[test]' command\\\"\\n\",\n    \"        logger.error(msg)\\n\",\n    \"        raise RuntimeError(msg)\\n\",\n    \"\\n\",\n    \"    kafka_version = get_kafka_version()\\n\",\n    \"    kafka_fname = f\\\"kafka_2.13-{kafka_version}\\\"\\n\",\n    \"    kafka_url = f\\\"{kafka_repo_url}/{kafka_version}/{kafka_fname}.tgz\\\"\\n\",\n    \"    tgz_path = local_path / f\\\"{kafka_fname}.tgz\\\"\\n\",\n    \"    kafka_path = (\\n\",\n    \"        local_path / \\\"kafka\\\"\\n\",\n    \"        if platform.system() == \\\"Windows\\\"\\n\",\n    \"        else local_path / f\\\"{kafka_fname}\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    if not check_kafka():\\n\",\n    \"        logger.info(\\\"Installing Kafka...\\\")\\n\",\n    \"        local_path.mkdir(exist_ok=True, parents=True)\\n\",\n    \"        response = requests.get(\\n\",\n    \"            kafka_url,\\n\",\n    \"            stream=True,\\n\",\n    \"            timeout=60,\\n\",\n    \"        )\\n\",\n    \"        try:\\n\",\n    \"            total = response.raw.length_remaining // 
128\\n\",\n    \"        except Exception:\\n\",\n    \"            total = None\\n\",\n    \"\\n\",\n    \"        with open(tgz_path, \\\"wb\\\") as f:\\n\",\n    \"            for data in tqdm(response.iter_content(chunk_size=128), total=total):\\n\",\n    \"                f.write(data)\\n\",\n    \"\\n\",\n    \"        with tarfile.open(tgz_path) as tar:\\n\",\n    \"            for tarinfo in tar:\\n\",\n    \"                tar.extract(tarinfo, local_path)\\n\",\n    \"\\n\",\n    \"        if platform.system() == \\\"Windows\\\":\\n\",\n    \"            rename(local_path / f\\\"{kafka_fname}\\\", kafka_path)\\n\",\n    \"\\n\",\n    \"        kafka_binary_path = (\\n\",\n    \"            f\\\";{kafka_path / 'bin' / 'windows'}\\\"\\n\",\n    \"            if platform.system() == \\\"Windows\\\"\\n\",\n    \"            else f\\\":{kafka_path / 'bin'}\\\"\\n\",\n    \"        )\\n\",\n    \"        environ[\\\"PATH\\\"] = environ[\\\"PATH\\\"] + kafka_binary_path\\n\",\n    \"        logger.info(f\\\"Kafka installed in {kafka_path}.\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0417b349\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: Kafka is installed.\\n\",\n      \"[INFO] __main__: But not exported to PATH, exporting...\\n\",\n      \"[INFO] __main__: Kafka is installed.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"script_extension = \\\"bat\\\" if platform.system() == \\\"Windows\\\" else \\\"sh\\\"\\n\",\n    \"\\n\",\n    \"_install_kafka()\\n\",\n    \"assert shutil.which(f\\\"kafka-server-start.{script_extension}\\\")\\n\",\n    \"_install_kafka()\\n\",\n    \"assert shutil.which(f\\\"kafka-server-start.{script_extension}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"eb3faa78\",\n   
\"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _install_testing_deps() -> None:\\n\",\n    \"    \\\"\\\"\\\"Installs Java and Kafka dependencies required for testing.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        RuntimeError: If Java or Kafka installation fails.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    _install_java()\\n\",\n    \"    _install_kafka()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"21cd9601\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"script_extension = \\\"bat\\\" if platform.system() == \\\"Windows\\\" else \\\"sh\\\"\\n\",\n    \"\\n\",\n    \"_install_testing_deps()\\n\",\n    \"assert shutil.which(\\\"java\\\")\\n\",\n    \"assert shutil.which(f\\\"kafka-server-start.{script_extension}\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a798e8ef\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def generate_app_src(out_path: Union[Path, str]) -> None:\\n\",\n    \"    \\\"\\\"\\\"Generates the source code for the test application based on a Jupyter notebook.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        out_path: Path where the generated source code will be saved.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        ValueError: If the Jupyter notebook file does not exist.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    import nbformat\\n\",\n    \"    from nbconvert import PythonExporter\\n\",\n    \"\\n\",\n    \"    path = Path(\\\"099_Test_Service.ipynb\\\")\\n\",\n    \"    if not path.exists():\\n\",\n    \"        path = Path(\\\"..\\\") / \\\"099_Test_Service.ipynb\\\"\\n\",\n    \"    if not path.exists():\\n\",\n    \"        raise ValueError(f\\\"Path '{path.resolve()}' does not exists.\\\")\\n\",\n    \"\\n\",\n    \"    with 
open(path, \\\"r\\\") as f:\\n\",\n    \"        notebook = nbformat.reads(f.read(), nbformat.NO_CONVERT)\\n\",\n    \"        exporter = PythonExporter()\\n\",\n    \"        source, _ = exporter.from_notebook_node(notebook)\\n\",\n    \"\\n\",\n    \"    with open(out_path, \\\"w\\\") as f:\\n\",\n    \"        f.write(source)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ba8a287f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    generate_app_src((Path(d) / \\\"main.py\\\"))\\n\",\n    \"    !ls -al {d}\\n\",\n    \"    !cat {d}/main.py | grep @kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"23b0e599\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@contextmanager\\n\",\n    \"def generate_app_in_tmp() -> Generator[str, None, None]:\\n\",\n    \"    \\\"\\\"\\\"Context manager that generates the test application source code in a temporary directory.\\n\",\n    \"\\n\",\n    \"    Yields:\\n\",\n    \"        str: Import statement for the generated test application.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    with TemporaryDirectory() as d:\\n\",\n    \"        src_path = Path(d) / \\\"main.py\\\"\\n\",\n    \"        generate_app_src(src_path)\\n\",\n    \"        with change_dir(d):\\n\",\n    \"            import_str = f\\\"{src_path.stem}:kafka_app\\\"\\n\",\n    \"            yield import_str\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"da3cab38\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with generate_app_in_tmp() as actual_import_str:\\n\",\n    \"    display(actual_import_str)\\n\",\n    \"    expected_import_str = \\\"main:kafka_app\\\"\\n\",\n    \"    assert actual_import_str == expected_import_str\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"code\",\n   \"execution_count\": null,\n   \"id\": \"99ce89c4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/099_Test_Service.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b2bb7341\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"/home/kumaran/.local/lib/python3.11/site-packages/pydantic/_internal/_config.py:257: UserWarning: Valid config keys have changed in V2:\\n\",\n      \"* 'json_encoders' has been removed\\n\",\n      \"  warnings.warn(message, UserWarning)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from datetime import datetime\\n\",\n    \"from enum import Enum\\n\",\n    \"from os import environ\\n\",\n    \"from pathlib import Path\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import yaml\\n\",\n    \"from pydantic import BaseModel, Field, NonNegativeInt\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from fastkafka._components.logger import get_logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"132d9f86\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b0303458\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class ModelType(str, Enum):\\n\",\n    \"    churn = \\\"churn\\\"\\n\",\n    \"    propensity_to_buy = \\\"propensity_to_buy\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class ModelTrainingRequest(BaseModel):\\n\",\n    \"    AccountId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=202020, description=\\\"ID of an account\\\"\\n\",\n    \"    )\\n\",\n    \"    ModelName: ModelType = Field(..., example=\\\"churn\\\", description=\\\"ID of an account\\\")\\n\",\n    \"    total_no_of_records: NonNegativeInt = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=1_000_000,\\n\",\n    \"        description=\\\"total number of records (rows) 
to be ingested\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class EventData(BaseModel):\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    A sequence of events for a fixed account_id\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    AccountId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=202020, description=\\\"ID of an account\\\"\\n\",\n    \"    )\\n\",\n    \"    Application: Optional[str] = Field(\\n\",\n    \"        None,\\n\",\n    \"        example=\\\"DriverApp\\\",\\n\",\n    \"        description=\\\"Name of the application in case there is more than one for the AccountId\\\",\\n\",\n    \"    )\\n\",\n    \"    DefinitionId: str = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"appLaunch\\\",\\n\",\n    \"        description=\\\"name of the event\\\",\\n\",\n    \"        min_length=1,\\n\",\n    \"    )\\n\",\n    \"    OccurredTime: datetime = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"2021-03-28T00:34:08\\\",\\n\",\n    \"        description=\\\"local time of the event\\\",\\n\",\n    \"    )\\n\",\n    \"    OccurredTimeTicks: NonNegativeInt = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=1616891648496,\\n\",\n    \"        description=\\\"local time of the event as the number of ticks\\\",\\n\",\n    \"    )\\n\",\n    \"    PersonId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=12345678, description=\\\"ID of a person\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class RealtimeData(BaseModel):\\n\",\n    \"    event_data: EventData = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=dict(\\n\",\n    \"            AccountId=202020,\\n\",\n    \"            Application=\\\"DriverApp\\\",\\n\",\n    \"            DefinitionId=\\\"appLaunch\\\",\\n\",\n    \"            OccurredTime=\\\"2021-03-28T00:34:08\\\",\\n\",\n    \"            OccurredTimeTicks=1616891648496,\\n\",\n    \"            
PersonId=12345678,\\n\",\n    \"        ),\\n\",\n    \"        description=\\\"realtime event data\\\",\\n\",\n    \"    )\\n\",\n    \"    make_prediction: bool = Field(\\n\",\n    \"        ..., example=True, description=\\\"trigger prediction message in prediction topic\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TrainingDataStatus(BaseModel):\\n\",\n    \"    AccountId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=202020, description=\\\"ID of an account\\\"\\n\",\n    \"    )\\n\",\n    \"    no_of_records: NonNegativeInt = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=12_345,\\n\",\n    \"        description=\\\"number of records (rows) ingested\\\",\\n\",\n    \"    )\\n\",\n    \"    total_no_of_records: NonNegativeInt = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=1_000_000,\\n\",\n    \"        description=\\\"total number of records (rows) to be ingested\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TrainingModelStatus(BaseModel):\\n\",\n    \"    AccountId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=202020, description=\\\"ID of an account\\\"\\n\",\n    \"    )\\n\",\n    \"    current_step: NonNegativeInt = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=0,\\n\",\n    \"        description=\\\"number of records (rows) ingested\\\",\\n\",\n    \"    )\\n\",\n    \"    current_step_percentage: float = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=0.21,\\n\",\n    \"        description=\\\"the percentage of the current step completed\\\",\\n\",\n    \"    )\\n\",\n    \"    total_no_of_steps: NonNegativeInt = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=1_000_000,\\n\",\n    \"        description=\\\"total number of steps for training the model\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class ModelMetrics(BaseModel):\\n\",\n    \"    \\\"\\\"\\\"The standard metrics for 
classification models.\\n\",\n    \"\\n\",\n    \"    The most important metrics is AUC for unbalanced classes such as churn. Metrics such as\\n\",\n    \"    accuracy are not very useful since they are easily maximized by outputting the most common\\n\",\n    \"    class all the time.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    AccountId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=202020, description=\\\"ID of an account\\\"\\n\",\n    \"    )\\n\",\n    \"    Application: Optional[str] = Field(\\n\",\n    \"        None,\\n\",\n    \"        example=\\\"DriverApp\\\",\\n\",\n    \"        description=\\\"Name of the application in case there is more than one for the AccountId\\\",\\n\",\n    \"    )\\n\",\n    \"    timestamp: datetime = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"2021-03-28T00:34:08\\\",\\n\",\n    \"        description=\\\"UTC time when the model was trained\\\",\\n\",\n    \"    )\\n\",\n    \"    mod_type: ModelType = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"churn\\\",\\n\",\n    \"        description=\\\"Name of the model used (churn, propensity to buy)\\\",\\n\",\n    \"    )\\n\",\n    \"    auc: float = Field(\\n\",\n    \"        ..., example=0.91, description=\\\"Area under ROC curve\\\", ge=0.0, le=1.0\\n\",\n    \"    )\\n\",\n    \"    f1: float = Field(..., example=0.89, description=\\\"F-1 score\\\", ge=0.0, le=1.0)\\n\",\n    \"    precission: float = Field(\\n\",\n    \"        ..., example=0.84, description=\\\"precission\\\", ge=0.0, le=1.0\\n\",\n    \"    )\\n\",\n    \"    recall: float = Field(..., example=0.82, description=\\\"recall\\\", ge=0.0, le=1.0)\\n\",\n    \"    accuracy: float = Field(..., example=0.82, description=\\\"accuracy\\\", ge=0.0, le=1.0)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class Prediction(BaseModel):\\n\",\n    \"    AccountId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=202020, description=\\\"ID of an 
account\\\"\\n\",\n    \"    )\\n\",\n    \"    Application: Optional[str] = Field(\\n\",\n    \"        None,\\n\",\n    \"        example=\\\"DriverApp\\\",\\n\",\n    \"        description=\\\"Name of the application in case there is more than one for the AccountId\\\",\\n\",\n    \"    )\\n\",\n    \"    PersonId: NonNegativeInt = Field(\\n\",\n    \"        ..., example=12345678, description=\\\"ID of a person\\\"\\n\",\n    \"    )\\n\",\n    \"    prediction_time: datetime = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"2021-03-28T00:34:08\\\",\\n\",\n    \"        description=\\\"UTC time of prediction\\\",\\n\",\n    \"    )\\n\",\n    \"    mod_type: ModelType = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"churn\\\",\\n\",\n    \"        description=\\\"Name of the model used (churn, propensity to buy)\\\",\\n\",\n    \"    )\\n\",\n    \"    score: float = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=0.4321,\\n\",\n    \"        description=\\\"Prediction score (e.g. 
the probability of churn in the next 28 days)\\\",\\n\",\n    \"        ge=0.0,\\n\",\n    \"        le=1.0,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"88dc9adc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"_total_no_of_records = 0\\n\",\n    \"_no_of_records_received = 0\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def create_ws_server(assets_path: Path = Path(\\\"./assets\\\")) -> FastKafka:\\n\",\n    \"    title = \\\"Example for FastKafka\\\"\\n\",\n    \"    description = \\\"A simple example on how to use FastKafka\\\"\\n\",\n    \"    version = \\\"0.0.1\\\"\\n\",\n    \"    openapi_url = \\\"/openapi.json\\\"\\n\",\n    \"    favicon_url = \\\"/assets/images/favicon.ico\\\"\\n\",\n    \"\\n\",\n    \"    contact = dict(name=\\\"airt.ai\\\", url=\\\"https://airt.ai\\\", email=\\\"info@airt.ai\\\")\\n\",\n    \"\\n\",\n    \"    kafka_brokers = {\\n\",\n    \"        \\\"localhost\\\": {\\n\",\n    \"            \\\"url\\\": environ.get(\\\"KAFKA_HOSTNAME\\\", \\\"localhost\\\"),\\n\",\n    \"            \\\"description\\\": \\\"local development kafka\\\",\\n\",\n    \"            \\\"port\\\": environ.get(\\\"KAFKA_PORT\\\", \\\"9092\\\"),\\n\",\n    \"        },\\n\",\n    \"        \\\"staging\\\": {\\n\",\n    \"            \\\"url\\\": \\\"kafka.staging.acme.com\\\",\\n\",\n    \"            \\\"description\\\": \\\"staging kafka\\\",\\n\",\n    \"            \\\"port\\\": \\\"9092\\\",\\n\",\n    \"            \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"            \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"        },\\n\",\n    \"        \\\"production\\\": {\\n\",\n    \"            \\\"url\\\": \\\"kafka.infobip.acme.com\\\",\\n\",\n    \"            \\\"description\\\": \\\"production kafka\\\",\\n\",\n    \"            \\\"port\\\": \\\"9092\\\",\\n\",\n    \"            \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        
    \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"        },\\n\",\n    \"    }\\n\",\n    \"\\n\",\n    \"    kafka_server_url = environ.get(\\\"KAFKA_HOSTNAME\\\", \\\"host_not_set\\\")\\n\",\n    \"    kafka_server_port = environ.get(\\\"KAFKA_PORT\\\", \\\"9999\\\")\\n\",\n    \"    kafka_config = {\\n\",\n    \"        \\\"group_id\\\": f\\\"{kafka_server_url}:{kafka_server_port}_group\\\",\\n\",\n    \"        \\\"auto_offset_reset\\\": \\\"earliest\\\",\\n\",\n    \"    }\\n\",\n    \"    if \\\"KAFKA_API_KEY\\\" in environ:\\n\",\n    \"        kafka_config = {\\n\",\n    \"            **kafka_config,\\n\",\n    \"            **{\\n\",\n    \"                \\\"security_protocol\\\": \\\"SASL_SSL\\\",\\n\",\n    \"                \\\"sasl_mechanisms\\\": \\\"PLAIN\\\",\\n\",\n    \"                \\\"sasl_username\\\": environ[\\\"KAFKA_API_KEY\\\"],\\n\",\n    \"                \\\"sasl_password\\\": environ[\\\"KAFKA_API_SECRET\\\"],\\n\",\n    \"            },\\n\",\n    \"        }\\n\",\n    \"\\n\",\n    \"    kafka_app = FastKafka(\\n\",\n    \"        title=title,\\n\",\n    \"        contact=contact,\\n\",\n    \"        description=description,\\n\",\n    \"        version=version,\\n\",\n    \"        kafka_brokers=kafka_brokers,\\n\",\n    \"        **kafka_config,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    @kafka_app.consumes()  # type: ignore\\n\",\n    \"    async def on_training_data(msg: EventData):\\n\",\n    \"        # ToDo: this is not showing up in logs\\n\",\n    \"        logger.debug(f\\\"msg={msg}\\\")\\n\",\n    \"        global _total_no_of_records\\n\",\n    \"        global _no_of_records_received\\n\",\n    \"        _no_of_records_received = _no_of_records_received + 1\\n\",\n    \"\\n\",\n    \"        if _no_of_records_received % 100 == 0:\\n\",\n    \"            training_data_status = TrainingDataStatus(\\n\",\n    \"                AccountId=EventData.AccountId,\\n\",\n    \"                
no_of_records=_no_of_records_received,\\n\",\n    \"                total_no_of_records=_total_no_of_records,\\n\",\n    \"            )\\n\",\n    \"            app.produce(\\\"training_data_status\\\", training_data_status)\\n\",\n    \"\\n\",\n    \"    @kafka_app.consumes()  # type: ignore\\n\",\n    \"    async def on_realitime_data(msg: RealtimeData):\\n\",\n    \"        pass\\n\",\n    \"\\n\",\n    \"    @kafka_app.produces()  # type: ignore\\n\",\n    \"    async def to_training_data_status(msg: TrainingDataStatus) -> TrainingDataStatus:\\n\",\n    \"        logger.debug(f\\\"on_training_data_status(msg={msg}, kafka_msg={kafka_msg})\\\")\\n\",\n    \"        return msg\\n\",\n    \"\\n\",\n    \"    @kafka_app.produces()  # type: ignore\\n\",\n    \"    async def to_training_model_status(msg: str) -> TrainingModelStatus:\\n\",\n    \"        logger.debug(f\\\"on_training_model_status(msg={msg}, kafka_msg={kafka_msg})\\\")\\n\",\n    \"        return TrainingModelStatus()\\n\",\n    \"\\n\",\n    \"    @kafka_app.produces()  # type: ignore\\n\",\n    \"    async def to_model_metrics(msg: ModelMetrics) -> ModelMetrics:\\n\",\n    \"        logger.debug(f\\\"on_training_model_status(msg={msg}, kafka_msg={kafka_msg})\\\")\\n\",\n    \"        return msg\\n\",\n    \"\\n\",\n    \"    @kafka_app.produces()  # type: ignore\\n\",\n    \"    async def to_prediction(msg: Prediction) -> Prediction:\\n\",\n    \"        logger.debug(f\\\"on_realtime_data_status(msg={msg},, kafka_msg={kafka_msg})\\\")\\n\",\n    \"        return msg\\n\",\n    \"\\n\",\n    \"    return kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"03a30de5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def create_app(assets_path: Path = Path(\\\"../assets\\\")) -> FastKafka:\\n\",\n    \"    assets_path = assets_path.resolve()\\n\",\n    \"    kafka_app = create_ws_server(assets_path=assets_path)\\n\",\n    \"    return 
kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b49811c7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assets_path: Path = Path(\\\"../assets\\\")\\n\",\n    \"kafka_app = create_app(assets_path=assets_path)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8b95a72c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/998_Internal_Helpers.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d2a8e928\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.helpers\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d3d7d267\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Internal helpers\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b84d16ae\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def in_notebook() -> bool:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Checks if the code is running in a Jupyter notebook or not.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        True if running in a Jupyter notebook, False otherwise.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    try:\\n\",\n    \"        from IPython import get_ipython\\n\",\n    \"\\n\",\n    \"        if \\\"IPKernelApp\\\" not in get_ipython().config:\\n\",\n    \"            return False\\n\",\n    \"    except ImportError:\\n\",\n    \"        return False\\n\",\n    \"    except AttributeError:\\n\",\n    \"        return False\\n\",\n    \"    return True\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9930711d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"in_notebook()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5d91bc9a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import contextlib\\n\",\n    \"import importlib\\n\",\n    \"import os\\n\",\n    \"import sys\\n\",\n    \"from datetime import datetime, 
timedelta\\n\",\n    \"from inspect import Parameter\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import typer\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"69fb3adc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import time\\n\",\n    \"from pathlib import Path\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"\\n\",\n    \"from fastkafka._aiokafka_imports import AIOKafkaConsumer, AIOKafkaProducer\\n\",\n    \"from nbdev_mkdocs.docstring import run_examples_from_docstring\\n\",\n    \"\\n\",\n    \"from fastkafka._application.app import FastKafka\\n\",\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka._components.test_dependencies import generate_app_src\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"254e1819\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@contextlib.contextmanager\\n\",\n    \"def change_dir(d: str) -> Generator[None, None, None]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Changes the current working directory temporarily.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        d: The directory to change to.\\n\",\n    \"\\n\",\n    \"    Yields:\\n\",\n    \"        None.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    curdir = os.getcwd()\\n\",\n    \"    os.chdir(d)\\n\",\n    \"    try:\\n\",\n    \"        yield\\n\",\n    \"    finally:\\n\",\n    \"        os.chdir(curdir)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ac1eac6b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    original_wd = os.getcwd()\\n\",\n    \"    assert original_wd != d\\n\",\n    \"    with change_dir(d):\\n\",\n    \"        assert os.getcwd() == d\\n\",\n    \"    assert 
os.getcwd() == original_wd\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"22ff9e03\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class ImportFromStringError(Exception):\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def _import_from_string(import_str: str) -> Any:\\n\",\n    \"    \\\"\\\"\\\"Imports library from string\\n\",\n    \"\\n\",\n    \"    Note:\\n\",\n    \"        copied from https://github.com/encode/uvicorn/blob/master/uvicorn/importer.py\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        import_str: input string in form 'main:app'\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    sys.path.append(\\\".\\\")\\n\",\n    \"\\n\",\n    \"    if not isinstance(import_str, str):\\n\",\n    \"        return import_str\\n\",\n    \"\\n\",\n    \"    module_str, _, attrs_str = import_str.partition(\\\":\\\")\\n\",\n    \"    if not module_str or not attrs_str:\\n\",\n    \"        message = (\\n\",\n    \"            'Import string \\\"{import_str}\\\" must be in format \\\"<module>:<attribute>\\\".'\\n\",\n    \"        )\\n\",\n    \"        typer.secho(f\\\"{message}\\\", err=True, fg=typer.colors.RED)\\n\",\n    \"        raise ImportFromStringError(message.format(import_str=import_str))\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"        # nosemgrep: python.lang.security.audit.non-literal-import.non-literal-import\\n\",\n    \"        module = importlib.import_module(module_str)\\n\",\n    \"    except ImportError as exc:\\n\",\n    \"        if exc.name != module_str:\\n\",\n    \"            raise exc from None\\n\",\n    \"        message = 'Could not import module \\\"{module_str}\\\".'\\n\",\n    \"        raise ImportFromStringError(message.format(module_str=module_str))\\n\",\n    \"\\n\",\n    \"    instance = module\\n\",\n    \"    try:\\n\",\n    \"        for attr_str in 
attrs_str.split(\\\".\\\"):\\n\",\n    \"            instance = getattr(instance, attr_str)\\n\",\n    \"    except AttributeError:\\n\",\n    \"        message = 'Attribute \\\"{attrs_str}\\\" not found in module \\\"{module_str}\\\".'\\n\",\n    \"        raise ImportFromStringError(\\n\",\n    \"            message.format(attrs_str=attrs_str, module_str=module_str)\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"    return instance\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"85e14aa3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    src_path = Path(d) / \\\"main.py\\\"\\n\",\n    \"    generate_app_src(src_path)\\n\",\n    \"    with change_dir(d):\\n\",\n    \"        kafka_app = _import_from_string(f\\\"{src_path.stem}:kafka_app\\\")\\n\",\n    \"        assert isinstance(kafka_app, FastKafka)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"77ddf011\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def true_after(seconds: Union[int, float]) -> Callable[[], bool]:\\n\",\n    \"    \\\"\\\"\\\"Function returning True after a given number of seconds\\\"\\\"\\\"\\n\",\n    \"    t = datetime.now()\\n\",\n    \"\\n\",\n    \"    def _true_after(seconds: Union[int, float] = seconds, t: datetime = t) -> bool:\\n\",\n    \"        return (datetime.now() - t) > timedelta(seconds=seconds)\\n\",\n    \"\\n\",\n    \"    return _true_after\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"44a89246\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"f = true_after(1.1)\\n\",\n    \"assert not f()\\n\",\n    \"time.sleep(1)\\n\",\n    \"assert not f()\\n\",\n    \"time.sleep(0.1)\\n\",\n    \"assert f()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 
null,\n   \"id\": \"652fe0a0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"def unwrap_list_type(var_type: Union[Type, Parameter]) -> Union[Type, Parameter]:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Unwraps the type of a list.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        var_type: Type to unwrap.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        Unwrapped type if the given type is a list, otherwise returns the same type.\\n\",\n    \"\\n\",\n    \"    Example:\\n\",\n    \"        - Input: List[str]\\n\",\n    \"          Output: str\\n\",\n    \"        - Input: int\\n\",\n    \"          Output: int\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if hasattr(var_type, \\\"__origin__\\\") and var_type.__origin__ == list:\\n\",\n    \"        return var_type.__args__[0]  # type: ignore\\n\",\n    \"    else:\\n\",\n    \"        return var_type\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cac74052\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert unwrap_list_type(List[int]) == int\\n\",\n    \"assert unwrap_list_type(int) == int\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3d35daef\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def remove_suffix(topic: str) -> str:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Removes the suffix from a string by splitting on underscores and joining all but the last element.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: The string to remove the suffix from.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The string without the suffix.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    return \\\"_\\\".join(topic.split(\\\"_\\\")[:-1])\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   
\"id\": \"736c2261\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"assert remove_suffix(\\\"on_topic.hello_1\\\") == \\\"on_topic.hello\\\"\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/999_Helpers.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d2f0771f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _helpers\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"510c8e0d\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Helpers\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4af114fa\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import inspect\\n\",\n    \"import json\\n\",\n    \"import textwrap\\n\",\n    \"from datetime import datetime, timedelta\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import aiohttp\\n\",\n    \"import anyio\\n\",\n    \"from fastkafka._aiokafka_imports import AIOKafkaProducer, AIOKafkaConsumer\\n\",\n    \"from aiokafka.helpers import create_ssl_context\\n\",\n    \"from aiokafka.structs import RecordMetadata\\n\",\n    \"from IPython.display import Markdown\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import in_notebook\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"from fastkafka._components.meta import delegates\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b77c1814\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from fastkafka._components.logger import suppress_timestamps\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ae342228\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"if in_notebook():\\n\",\n    \"    from tqdm.notebook import tqdm\\n\",\n    \"else:\\n\",\n    \"    from tqdm import tqdm\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   
\"id\": \"d9f26762\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import pytest\\n\",\n    \"from pydantic import BaseModel\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cf2b594b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"74640d74\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__, level=20)\\n\",\n    \"logger.info(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"685d0ba3\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Configuration conversions between Confluent and AIOKafka formats\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2b85a92f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(AIOKafkaProducer)\\n\",\n    \"def aiokafka2confluent(**kwargs: Dict[str, Any]) -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"Converts AIOKafka styled config dictionary into Confluence styled one\\n\",\n    \"\\n\",\n    \"    Returns (Dict[str, Any]):\\n\",\n    \"        Confluence styled config dictionary\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        bootstrap_servers (str, list(str)): a ``host[:port]`` string or list of\\n\",\n    \"            ``host[:port]`` strings that the producer should contact to\\n\",\n    \"            bootstrap initial cluster metadata. This does not have to be the\\n\",\n    \"            full node list.  
It just needs to have at least one broker that will\\n\",\n    \"            respond to a Metadata API Request. Default port is 9092. If no\\n\",\n    \"            servers are specified, will default to ``localhost:9092``.\\n\",\n    \"        client_id (str): a name for this client. This string is passed in\\n\",\n    \"            each request to servers and can be used to identify specific\\n\",\n    \"            server-side log entries that correspond to this client.\\n\",\n    \"            Default: ``aiokafka-producer-#`` (appended with a unique number\\n\",\n    \"            per instance)\\n\",\n    \"        key_serializer (Callable): used to convert user-supplied keys to bytes\\n\",\n    \"            If not :data:`None`, called as ``f(key),`` should return\\n\",\n    \"            :class:`bytes`.\\n\",\n    \"            Default: :data:`None`.\\n\",\n    \"        value_serializer (Callable): used to convert user-supplied message\\n\",\n    \"            values to :class:`bytes`. If not :data:`None`, called as\\n\",\n    \"            ``f(value)``, should return :class:`bytes`.\\n\",\n    \"            Default: :data:`None`.\\n\",\n    \"        acks (Any): one of ``0``, ``1``, ``all``. The number of acknowledgments\\n\",\n    \"            the producer requires the leader to have received before considering a\\n\",\n    \"            request complete. This controls the durability of records that are\\n\",\n    \"            sent. The following settings are common:\\n\",\n    \"\\n\",\n    \"            * ``0``: Producer will not wait for any acknowledgment from the server\\n\",\n    \"              at all. The message will immediately be added to the socket\\n\",\n    \"              buffer and considered sent. 
No guarantee can be made that the\\n\",\n    \"              server has received the record in this case, and the retries\\n\",\n    \"              configuration will not take effect (as the client won't\\n\",\n    \"              generally know of any failures). The offset given back for each\\n\",\n    \"              record will always be set to -1.\\n\",\n    \"            * ``1``: The broker leader will write the record to its local log but\\n\",\n    \"              will respond without awaiting full acknowledgement from all\\n\",\n    \"              followers. In this case should the leader fail immediately\\n\",\n    \"              after acknowledging the record but before the followers have\\n\",\n    \"              replicated it then the record will be lost.\\n\",\n    \"            * ``all``: The broker leader will wait for the full set of in-sync\\n\",\n    \"              replicas to acknowledge the record. This guarantees that the\\n\",\n    \"              record will not be lost as long as at least one in-sync replica\\n\",\n    \"              remains alive. This is the strongest available guarantee.\\n\",\n    \"\\n\",\n    \"            If unset, defaults to ``acks=1``. If `enable_idempotence` is\\n\",\n    \"            :data:`True` defaults to ``acks=all``\\n\",\n    \"        compression_type (str): The compression type for all data generated by\\n\",\n    \"            the producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\\n\",\n    \"            or :data:`None`.\\n\",\n    \"            Compression is of full batches of data, so the efficacy of batching\\n\",\n    \"            will also impact the compression ratio (more batching means better\\n\",\n    \"            compression). 
Default: :data:`None`.\\n\",\n    \"        max_batch_size (int): Maximum size of buffered data per partition.\\n\",\n    \"            After this amount :meth:`send` coroutine will block until batch is\\n\",\n    \"            drained.\\n\",\n    \"            Default: 16384\\n\",\n    \"        linger_ms (int): The producer groups together any records that arrive\\n\",\n    \"            in between request transmissions into a single batched request.\\n\",\n    \"            Normally this occurs only under load when records arrive faster\\n\",\n    \"            than they can be sent out. However in some circumstances the client\\n\",\n    \"            may want to reduce the number of requests even under moderate load.\\n\",\n    \"            This setting accomplishes this by adding a small amount of\\n\",\n    \"            artificial delay; that is, if first request is processed faster,\\n\",\n    \"            than `linger_ms`, producer will wait ``linger_ms - process_time``.\\n\",\n    \"            Default: 0 (i.e. no delay).\\n\",\n    \"        partitioner (Callable): Callable used to determine which partition\\n\",\n    \"            each message is assigned to. Called (after key serialization):\\n\",\n    \"            ``partitioner(key_bytes, all_partitions, available_partitions)``.\\n\",\n    \"            The default partitioner implementation hashes each non-None key\\n\",\n    \"            using the same murmur2 algorithm as the Java client so that\\n\",\n    \"            messages with the same key are assigned to the same partition.\\n\",\n    \"            When a key is :data:`None`, the message is delivered to a random partition\\n\",\n    \"            (filtered to partitions with available leaders only, if possible).\\n\",\n    \"        max_request_size (int): The maximum size of a request. This is also\\n\",\n    \"            effectively a cap on the maximum record size. 
Note that the server\\n\",\n    \"            has its own cap on record size which may be different from this.\\n\",\n    \"            This setting will limit the number of record batches the producer\\n\",\n    \"            will send in a single request to avoid sending huge requests.\\n\",\n    \"            Default: 1048576.\\n\",\n    \"        metadata_max_age_ms (int): The period of time in milliseconds after\\n\",\n    \"            which we force a refresh of metadata even if we haven't seen any\\n\",\n    \"            partition leadership changes to proactively discover any new\\n\",\n    \"            brokers or partitions. Default: 300000\\n\",\n    \"        request_timeout_ms (int): Produce request timeout in milliseconds.\\n\",\n    \"            As it's sent as part of\\n\",\n    \"            :class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\\n\",\n    \"            call), maximum waiting time can be up to ``2 *\\n\",\n    \"            request_timeout_ms``.\\n\",\n    \"            Default: 40000.\\n\",\n    \"        retry_backoff_ms (int): Milliseconds to backoff when retrying on\\n\",\n    \"            errors. Default: 100.\\n\",\n    \"        api_version (str): specify which kafka API version to use.\\n\",\n    \"            If set to ``auto``, will attempt to infer the broker version by\\n\",\n    \"            probing various APIs. Default: ``auto``\\n\",\n    \"        security_protocol (str): Protocol used to communicate with brokers.\\n\",\n    \"            Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\\n\",\n    \"            Default: ``PLAINTEXT``.\\n\",\n    \"        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\\n\",\n    \"            for wrapping socket connections. Directly passed into asyncio's\\n\",\n    \"            :meth:`~asyncio.loop.create_connection`. 
For more\\n\",\n    \"            information see :ref:`ssl_auth`.\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"        connections_max_idle_ms (int): Close idle connections after the number\\n\",\n    \"            of milliseconds specified by this config. Specifying :data:`None` will\\n\",\n    \"            disable idle checks. Default: 540000 (9 minutes).\\n\",\n    \"        enable_idempotence (bool): When set to :data:`True`, the producer will\\n\",\n    \"            ensure that exactly one copy of each message is written in the\\n\",\n    \"            stream. If :data:`False`, producer retries due to broker failures,\\n\",\n    \"            etc., may write duplicates of the retried message in the stream.\\n\",\n    \"            Note that enabling idempotence acks to set to ``all``. If it is not\\n\",\n    \"            explicitly set by the user it will be chosen. If incompatible\\n\",\n    \"            values are set, a :exc:`ValueError` will be thrown.\\n\",\n    \"            New in version 0.5.0.\\n\",\n    \"        sasl_mechanism (str): Authentication mechanism when security_protocol\\n\",\n    \"            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\\n\",\n    \"            are: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\\n\",\n    \"            ``OAUTHBEARER``.\\n\",\n    \"            Default: ``PLAIN``\\n\",\n    \"        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"        sasl_oauth_token_provider (: class:`~aiokafka.abc.AbstractTokenProvider`):\\n\",\n    \"            OAuthBearer token provider instance. 
(See\\n\",\n    \"            :mod:`kafka.oauth.abstract`).\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    confluent_config = {k.replace(\\\"_\\\", \\\".\\\"): v for k, v in kwargs.items()}\\n\",\n    \"    for k1, k2 in zip(\\n\",\n    \"        [\\\"sasl.plain.username\\\", \\\"sasl.plain.password\\\"],\\n\",\n    \"        [\\\"sasl.username\\\", \\\"sasl.password\\\"],\\n\",\n    \"    ):\\n\",\n    \"        if k1 in confluent_config:\\n\",\n    \"            confluent_config[k2] = confluent_config.pop(k1)\\n\",\n    \"\\n\",\n    \"    if \\\"ssl.context\\\" in confluent_config:\\n\",\n    \"        confluent_config.pop(\\\"ssl.context\\\")\\n\",\n    \"\\n\",\n    \"    return confluent_config\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fe42a4bb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"ssl_context = create_ssl_context()\\n\",\n    \"\\n\",\n    \"aiokafka_config = {\\n\",\n    \"    \\\"bootstrap_servers\\\": f\\\"kafka.staging.airt:9092\\\",\\n\",\n    \"    \\\"group_id\\\": \\\"kafka_consume_group\\\",\\n\",\n    \"    \\\"auto_offset_reset\\\": \\\"earliest\\\",\\n\",\n    \"    \\\"security_protocol\\\": \\\"SASL_SSL\\\",\\n\",\n    \"    \\\"sasl_mechanism\\\": \\\"PLAIN\\\",\\n\",\n    \"    \\\"sasl_plain_username\\\": \\\"myname\\\",\\n\",\n    \"    \\\"sasl_plain_password\\\": \\\"*************\\\",\\n\",\n    \"    \\\"ssl_context\\\": create_ssl_context(),\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"bootstrap.servers\\\": \\\"kafka.staging.airt:9092\\\",\\n\",\n    \"    \\\"group.id\\\": \\\"kafka_consume_group\\\",\\n\",\n    \"    \\\"auto.offset.reset\\\": \\\"earliest\\\",\\n\",\n    \"    \\\"security.protocol\\\": \\\"SASL_SSL\\\",\\n\",\n    \"    \\\"sasl.mechanism\\\": \\\"PLAIN\\\",\\n\",\n    \"    \\\"sasl.username\\\": \\\"myname\\\",\\n\",\n    \"    \\\"sasl.password\\\": 
\\\"*************\\\",\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"confluent_config = aiokafka2confluent(**aiokafka_config)\\n\",\n    \"assert confluent_config == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b016537f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def confluent2aiokafka(confluent_config: Dict[str, Any]) -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"Converts Confluent styled config dictionary into AIOKafka styled one\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        confluent_config: Confluent styled config dictionary\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        AIOKafka styled config dictionary\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    aiokafka_config = {k.replace(\\\".\\\", \\\"_\\\"): v for k, v in confluent_config.items()}\\n\",\n    \"    for k1, k2 in zip(\\n\",\n    \"        [\\\"sasl_username\\\", \\\"sasl_password\\\"],\\n\",\n    \"        [\\\"sasl_plain_username\\\", \\\"sasl_plain_password\\\"],\\n\",\n    \"    ):\\n\",\n    \"        if k1 in aiokafka_config:\\n\",\n    \"            aiokafka_config[k2] = aiokafka_config.pop(k1)\\n\",\n    \"\\n\",\n    \"    if \\\"sasl_plain_username\\\" in aiokafka_config:\\n\",\n    \"        aiokafka_config[\\\"ssl.context\\\"] = (create_ssl_context(),)\\n\",\n    \"\\n\",\n    \"    return aiokafka_config\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0787bb4f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"confluent_config = {\\n\",\n    \"    \\\"bootstrap.servers\\\": \\\"kafka.staging.airt:9092\\\",\\n\",\n    \"    \\\"group.id\\\": \\\"kafka_consume_group\\\",\\n\",\n    \"    \\\"auto.offset.reset\\\": \\\"earliest\\\",\\n\",\n    \"    \\\"security.protocol\\\": \\\"SASL_SSL\\\",\\n\",\n    \"    \\\"sasl.mechanism\\\": \\\"PLAIN\\\",\\n\",\n   
 \"    \\\"sasl.username\\\": \\\"myname\\\",\\n\",\n    \"    \\\"sasl.password\\\": \\\"*************\\\",\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"bootstrap_servers\\\": \\\"kafka.staging.airt:9092\\\",\\n\",\n    \"    \\\"group_id\\\": \\\"kafka_consume_group\\\",\\n\",\n    \"    \\\"auto_offset_reset\\\": \\\"earliest\\\",\\n\",\n    \"    \\\"security_protocol\\\": \\\"SASL_SSL\\\",\\n\",\n    \"    \\\"sasl_mechanism\\\": \\\"PLAIN\\\",\\n\",\n    \"    \\\"sasl_plain_username\\\": \\\"myname\\\",\\n\",\n    \"    \\\"sasl_plain_password\\\": \\\"*************\\\",\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"aiokafka_config = confluent2aiokafka(confluent_config)\\n\",\n    \"\\n\",\n    \"aiokafka_config.pop(\\\"ssl.context\\\")\\n\",\n    \"\\n\",\n    \"assert aiokafka_config == expected\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f29bfa31\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Producing and consuming messages\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cd699bdd\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(AIOKafkaProducer)\\n\",\n    \"async def produce_messages(  # type: ignore\\n\",\n    \"    *,\\n\",\n    \"    topic: str,\\n\",\n    \"    msgs: List[Any],\\n\",\n    \"    **kwargs: Dict[str, Any],\\n\",\n    \") -> List[RecordMetadata]:\\n\",\n    \"    \\\"\\\"\\\"Produces messages to Kafka topic\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        topic: Topic name\\n\",\n    \"        msgs: a list of messages to produce\\n\",\n    \"        bootstrap_servers (str, list(str)): a ``host[:port]`` string or list of\\n\",\n    \"            ``host[:port]`` strings that the producer should contact to\\n\",\n    \"            bootstrap initial cluster metadata. This does not have to be the\\n\",\n    \"            full node list.  
It just needs to have at least one broker that will\\n\",\n    \"            respond to a Metadata API Request. Default port is 9092. If no\\n\",\n    \"            servers are specified, will default to ``localhost:9092``.\\n\",\n    \"        client_id (str): a name for this client. This string is passed in\\n\",\n    \"            each request to servers and can be used to identify specific\\n\",\n    \"            server-side log entries that correspond to this client.\\n\",\n    \"            Default: ``aiokafka-producer-#`` (appended with a unique number\\n\",\n    \"            per instance)\\n\",\n    \"        key_serializer (Callable): used to convert user-supplied keys to bytes\\n\",\n    \"            If not :data:`None`, called as ``f(key),`` should return\\n\",\n    \"            :class:`bytes`.\\n\",\n    \"            Default: :data:`None`.\\n\",\n    \"        value_serializer (Callable): used to convert user-supplied message\\n\",\n    \"            values to :class:`bytes`. If not :data:`None`, called as\\n\",\n    \"            ``f(value)``, should return :class:`bytes`.\\n\",\n    \"            Default: :data:`None`.\\n\",\n    \"        acks (Any): one of ``0``, ``1``, ``all``. The number of acknowledgments\\n\",\n    \"            the producer requires the leader to have received before considering a\\n\",\n    \"            request complete. This controls the durability of records that are\\n\",\n    \"            sent. The following settings are common:\\n\",\n    \"\\n\",\n    \"            * ``0``: Producer will not wait for any acknowledgment from the server\\n\",\n    \"              at all. The message will immediately be added to the socket\\n\",\n    \"              buffer and considered sent. 
No guarantee can be made that the\\n\",\n    \"              server has received the record in this case, and the retries\\n\",\n    \"              configuration will not take effect (as the client won't\\n\",\n    \"              generally know of any failures). The offset given back for each\\n\",\n    \"              record will always be set to -1.\\n\",\n    \"            * ``1``: The broker leader will write the record to its local log but\\n\",\n    \"              will respond without awaiting full acknowledgement from all\\n\",\n    \"              followers. In this case should the leader fail immediately\\n\",\n    \"              after acknowledging the record but before the followers have\\n\",\n    \"              replicated it then the record will be lost.\\n\",\n    \"            * ``all``: The broker leader will wait for the full set of in-sync\\n\",\n    \"              replicas to acknowledge the record. This guarantees that the\\n\",\n    \"              record will not be lost as long as at least one in-sync replica\\n\",\n    \"              remains alive. This is the strongest available guarantee.\\n\",\n    \"\\n\",\n    \"            If unset, defaults to ``acks=1``. If `enable_idempotence` is\\n\",\n    \"            :data:`True` defaults to ``acks=all``\\n\",\n    \"        compression_type (str): The compression type for all data generated by\\n\",\n    \"            the producer. Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\\n\",\n    \"            or :data:`None`.\\n\",\n    \"            Compression is of full batches of data, so the efficacy of batching\\n\",\n    \"            will also impact the compression ratio (more batching means better\\n\",\n    \"            compression). 
Default: :data:`None`.\\n\",\n    \"        max_batch_size (int): Maximum size of buffered data per partition.\\n\",\n    \"            After this amount :meth:`send` coroutine will block until batch is\\n\",\n    \"            drained.\\n\",\n    \"            Default: 16384\\n\",\n    \"        linger_ms (int): The producer groups together any records that arrive\\n\",\n    \"            in between request transmissions into a single batched request.\\n\",\n    \"            Normally this occurs only under load when records arrive faster\\n\",\n    \"            than they can be sent out. However in some circumstances the client\\n\",\n    \"            may want to reduce the number of requests even under moderate load.\\n\",\n    \"            This setting accomplishes this by adding a small amount of\\n\",\n    \"            artificial delay; that is, if first request is processed faster,\\n\",\n    \"            than `linger_ms`, producer will wait ``linger_ms - process_time``.\\n\",\n    \"            Default: 0 (i.e. no delay).\\n\",\n    \"        partitioner (Callable): Callable used to determine which partition\\n\",\n    \"            each message is assigned to. Called (after key serialization):\\n\",\n    \"            ``partitioner(key_bytes, all_partitions, available_partitions)``.\\n\",\n    \"            The default partitioner implementation hashes each non-None key\\n\",\n    \"            using the same murmur2 algorithm as the Java client so that\\n\",\n    \"            messages with the same key are assigned to the same partition.\\n\",\n    \"            When a key is :data:`None`, the message is delivered to a random partition\\n\",\n    \"            (filtered to partitions with available leaders only, if possible).\\n\",\n    \"        max_request_size (int): The maximum size of a request. This is also\\n\",\n    \"            effectively a cap on the maximum record size. 
Note that the server\\n\",\n    \"            has its own cap on record size which may be different from this.\\n\",\n    \"            This setting will limit the number of record batches the producer\\n\",\n    \"            will send in a single request to avoid sending huge requests.\\n\",\n    \"            Default: 1048576.\\n\",\n    \"        metadata_max_age_ms (int): The period of time in milliseconds after\\n\",\n    \"            which we force a refresh of metadata even if we haven't seen any\\n\",\n    \"            partition leadership changes to proactively discover any new\\n\",\n    \"            brokers or partitions. Default: 300000\\n\",\n    \"        request_timeout_ms (int): Produce request timeout in milliseconds.\\n\",\n    \"            As it's sent as part of\\n\",\n    \"            :class:`~kafka.protocol.produce.ProduceRequest` (it's a blocking\\n\",\n    \"            call), maximum waiting time can be up to ``2 *\\n\",\n    \"            request_timeout_ms``.\\n\",\n    \"            Default: 40000.\\n\",\n    \"        retry_backoff_ms (int): Milliseconds to backoff when retrying on\\n\",\n    \"            errors. Default: 100.\\n\",\n    \"        api_version (str): specify which kafka API version to use.\\n\",\n    \"            If set to ``auto``, will attempt to infer the broker version by\\n\",\n    \"            probing various APIs. Default: ``auto``\\n\",\n    \"        security_protocol (str): Protocol used to communicate with brokers.\\n\",\n    \"            Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\\n\",\n    \"            Default: ``PLAINTEXT``.\\n\",\n    \"        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\\n\",\n    \"            for wrapping socket connections. Directly passed into asyncio's\\n\",\n    \"            :meth:`~asyncio.loop.create_connection`. 
For more\\n\",\n    \"            information see :ref:`ssl_auth`.\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"        connections_max_idle_ms (int): Close idle connections after the number\\n\",\n    \"            of milliseconds specified by this config. Specifying :data:`None` will\\n\",\n    \"            disable idle checks. Default: 540000 (9 minutes).\\n\",\n    \"        enable_idempotence (bool): When set to :data:`True`, the producer will\\n\",\n    \"            ensure that exactly one copy of each message is written in the\\n\",\n    \"            stream. If :data:`False`, producer retries due to broker failures,\\n\",\n    \"            etc., may write duplicates of the retried message in the stream.\\n\",\n    \"            Note that enabling idempotence acks to set to ``all``. If it is not\\n\",\n    \"            explicitly set by the user it will be chosen. If incompatible\\n\",\n    \"            values are set, a :exc:`ValueError` will be thrown.\\n\",\n    \"            New in version 0.5.0.\\n\",\n    \"        sasl_mechanism (str): Authentication mechanism when security_protocol\\n\",\n    \"            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values\\n\",\n    \"            are: ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\\n\",\n    \"            ``OAUTHBEARER``.\\n\",\n    \"            Default: ``PLAIN``\\n\",\n    \"        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"        sasl_oauth_token_provider (: class:`~aiokafka.abc.AbstractTokenProvider`):\\n\",\n    \"            OAuthBearer token provider instance. 
(See\\n\",\n    \"            :mod:`kafka.oauth.abstract`).\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    p = AIOKafkaProducer(**kwargs)\\n\",\n    \"    await p.start()\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"\\n\",\n    \"        def prepare_msg(msg: Any) -> bytes:\\n\",\n    \"            if isinstance(msg, bytes):\\n\",\n    \"                return msg\\n\",\n    \"            elif isinstance(msg, str):\\n\",\n    \"                return msg.encode(\\\"utf-8\\\")\\n\",\n    \"            elif hasattr(msg, \\\"json\\\"):\\n\",\n    \"                return msg.json().encode(\\\"utf-8\\\")  # type: ignore\\n\",\n    \"            return json.dumps(msg).encode(\\\"utf-8\\\")\\n\",\n    \"\\n\",\n    \"        fx = [\\n\",\n    \"            await p.send(topic, prepare_msg(msg))\\n\",\n    \"            for msg in tqdm(msgs, desc=f\\\"producing to '{topic}'\\\")\\n\",\n    \"        ]\\n\",\n    \"        delivery = [await f for f in fx]\\n\",\n    \"        return delivery\\n\",\n    \"    finally:\\n\",\n    \"        await p.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"122e4eb1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# print(combine_params(produce_messages, AIOKafkaProducer).__doc__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"338d0a92\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] 
fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"25c5bbb2b11f44f3a3ea78dac14454a4\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_produce_messages':   0%|          | 0/120000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 2864...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 2864 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 2490...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 2490 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"class Hello(BaseModel):\\n\",\n    \"    msg: str\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"msgs_count = 120_000\\n\",\n    \"msgs = (\\n\",\n    \"    [b\\\"Hello world bytes\\\" for _ in range(msgs_count // 3)]\\n\",\n    \"    + [f\\\"Hello world as string for the {i+1}. time!\\\" for i in range(msgs_count // 3)]\\n\",\n    \"    + [\\n\",\n    \"        Hello(msg=\\\"Hello workd as Pydantic object for the {i+1}. 
time!\\\")\\n\",\n    \"        for i in range(msgs_count // 3)\\n\",\n    \"    ]\\n\",\n    \")\\n\",\n    \"async with ApacheKafkaBroker(topics=[\\\"test_produce_messages\\\"], listener_port=9992) as bootstrap_server:\\n\",\n    \"    delivery_report = await produce_messages(\\n\",\n    \"        msgs=msgs, topic=\\\"test_produce_messages\\\", bootstrap_servers=bootstrap_server\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7ae30572\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(AIOKafkaConsumer)\\n\",\n    \"async def consumes_messages(\\n\",\n    \"    *,\\n\",\n    \"    topic: str,\\n\",\n    \"    msgs_count: int,\\n\",\n    \"    **kwargs: Dict[str, Any],\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"Consumes messages\\n\",\n    \"    Args:\\n\",\n    \"        topic: Topic name\\n\",\n    \"        msgs_count: number of messages to consume before returning\\n\",\n    \"        *topics (list(str)): optional list of topics to subscribe to. If not set,\\n\",\n    \"            call :meth:`.subscribe` or :meth:`.assign` before consuming records.\\n\",\n    \"            Passing topics directly is same as calling :meth:`.subscribe` API.\\n\",\n    \"        bootstrap_servers (str, list(str)): a ``host[:port]`` string (or list of\\n\",\n    \"            ``host[:port]`` strings) that the consumer should contact to bootstrap\\n\",\n    \"            initial cluster metadata.\\n\",\n    \"\\n\",\n    \"            This does not have to be the full node list.\\n\",\n    \"            It just needs to have at least one broker that will respond to a\\n\",\n    \"            Metadata API Request. Default port is 9092. If no servers are\\n\",\n    \"            specified, will default to ``localhost:9092``.\\n\",\n    \"        client_id (str): a name for this client. 
This string is passed in\\n\",\n    \"            each request to servers and can be used to identify specific\\n\",\n    \"            server-side log entries that correspond to this client. Also\\n\",\n    \"            submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\\n\",\n    \"            for logging with respect to consumer group administration. Default:\\n\",\n    \"            ``aiokafka-{version}``\\n\",\n    \"        group_id (str or None): name of the consumer group to join for dynamic\\n\",\n    \"            partition assignment (if enabled), and to use for fetching and\\n\",\n    \"            committing offsets. If None, auto-partition assignment (via\\n\",\n    \"            group coordinator) and offset commits are disabled.\\n\",\n    \"            Default: None\\n\",\n    \"        key_deserializer (Callable): Any callable that takes a\\n\",\n    \"            raw message key and returns a deserialized key.\\n\",\n    \"        value_deserializer (Callable, Optional): Any callable that takes a\\n\",\n    \"            raw message value and returns a deserialized value.\\n\",\n    \"        fetch_min_bytes (int): Minimum amount of data the server should\\n\",\n    \"            return for a fetch request, otherwise wait up to\\n\",\n    \"            `fetch_max_wait_ms` for more data to accumulate. Default: 1.\\n\",\n    \"        fetch_max_bytes (int): The maximum amount of data the server should\\n\",\n    \"            return for a fetch request. This is not an absolute maximum, if\\n\",\n    \"            the first message in the first non-empty partition of the fetch\\n\",\n    \"            is larger than this value, the message will still be returned\\n\",\n    \"            to ensure that the consumer can make progress. 
NOTE: consumer\\n\",\n    \"            performs fetches to multiple brokers in parallel so memory\\n\",\n    \"            usage will depend on the number of brokers containing\\n\",\n    \"            partitions for the topic.\\n\",\n    \"            Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\\n\",\n    \"        fetch_max_wait_ms (int): The maximum amount of time in milliseconds\\n\",\n    \"            the server will block before answering the fetch request if\\n\",\n    \"            there isn't sufficient data to immediately satisfy the\\n\",\n    \"            requirement given by fetch_min_bytes. Default: 500.\\n\",\n    \"        max_partition_fetch_bytes (int): The maximum amount of data\\n\",\n    \"            per-partition the server will return. The maximum total memory\\n\",\n    \"            used for a request ``= #partitions * max_partition_fetch_bytes``.\\n\",\n    \"            This size must be at least as large as the maximum message size\\n\",\n    \"            the server allows or else it is possible for the producer to\\n\",\n    \"            send messages larger than the consumer can fetch. If that\\n\",\n    \"            happens, the consumer can get stuck trying to fetch a large\\n\",\n    \"            message on a certain partition. Default: 1048576.\\n\",\n    \"        max_poll_records (int): The maximum number of records returned in a\\n\",\n    \"            single call to :meth:`.getmany`. Defaults ``None``, no limit.\\n\",\n    \"        request_timeout_ms (int): Client request timeout in milliseconds.\\n\",\n    \"            Default: 40000.\\n\",\n    \"        retry_backoff_ms (int): Milliseconds to backoff when retrying on\\n\",\n    \"            errors. 
Default: 100.\\n\",\n    \"        auto_offset_reset (str): A policy for resetting offsets on\\n\",\n    \"            :exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\\n\",\n    \"            available message, ``latest`` will move to the most recent, and\\n\",\n    \"            ``none`` will raise an exception so you can handle this case.\\n\",\n    \"            Default: ``latest``.\\n\",\n    \"        enable_auto_commit (bool): If true the consumer's offset will be\\n\",\n    \"            periodically committed in the background. Default: True.\\n\",\n    \"        auto_commit_interval_ms (int): milliseconds between automatic\\n\",\n    \"            offset commits, if enable_auto_commit is True. Default: 5000.\\n\",\n    \"        check_crcs (bool): Automatically check the CRC32 of the records\\n\",\n    \"            consumed. This ensures no on-the-wire or on-disk corruption to\\n\",\n    \"            the messages occurred. This check adds some overhead, so it may\\n\",\n    \"            be disabled in cases seeking extreme performance. Default: True\\n\",\n    \"        metadata_max_age_ms (int): The period of time in milliseconds after\\n\",\n    \"            which we force a refresh of metadata even if we haven't seen any\\n\",\n    \"            partition leadership changes to proactively discover any new\\n\",\n    \"            brokers or partitions. Default: 300000\\n\",\n    \"        partition_assignment_strategy (list): List of objects to use to\\n\",\n    \"            distribute partition ownership amongst consumer instances when\\n\",\n    \"            group management is used. This preference is implicit in the order\\n\",\n    \"            of the strategies in the list. When assignment strategy changes:\\n\",\n    \"            to support a change to the assignment strategy, new versions must\\n\",\n    \"            enable support both for the old assignment strategy and the new\\n\",\n    \"            one. 
The coordinator will choose the old assignment strategy until\\n\",\n    \"            all members have been updated. Then it will choose the new\\n\",\n    \"            strategy. Default: [:class:`.RoundRobinPartitionAssignor`]\\n\",\n    \"        max_poll_interval_ms (int): Maximum allowed time between calls to\\n\",\n    \"            consume messages (e.g., :meth:`.getmany`). If this interval\\n\",\n    \"            is exceeded the consumer is considered failed and the group will\\n\",\n    \"            rebalance in order to reassign the partitions to another consumer\\n\",\n    \"            group member. If API methods block waiting for messages, that time\\n\",\n    \"            does not count against this timeout. See `KIP-62`_ for more\\n\",\n    \"            information. Default 300000\\n\",\n    \"        rebalance_timeout_ms (int): The maximum time server will wait for this\\n\",\n    \"            consumer to rejoin the group in a case of rebalance. In Java client\\n\",\n    \"            this behaviour is bound to `max.poll.interval.ms` configuration,\\n\",\n    \"            but as ``aiokafka`` will rejoin the group in the background, we\\n\",\n    \"            decouple this setting to allow finer tuning by users that use\\n\",\n    \"            :class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\\n\",\n    \"            to ``session_timeout_ms``\\n\",\n    \"        session_timeout_ms (int): Client group session and failure detection\\n\",\n    \"            timeout. The consumer sends periodic heartbeats\\n\",\n    \"            (`heartbeat.interval.ms`) to indicate its liveness to the broker.\\n\",\n    \"            If no hearts are received by the broker for a group member within\\n\",\n    \"            the session timeout, the broker will remove the consumer from the\\n\",\n    \"            group and trigger a rebalance. 
The allowed range is configured with\\n\",\n    \"            the **broker** configuration properties\\n\",\n    \"            `group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\\n\",\n    \"            Default: 10000\\n\",\n    \"        heartbeat_interval_ms (int): The expected time in milliseconds\\n\",\n    \"            between heartbeats to the consumer coordinator when using\\n\",\n    \"            Kafka's group management feature. Heartbeats are used to ensure\\n\",\n    \"            that the consumer's session stays active and to facilitate\\n\",\n    \"            rebalancing when new consumers join or leave the group. The\\n\",\n    \"            value must be set lower than `session_timeout_ms`, but typically\\n\",\n    \"            should be set no higher than 1/3 of that value. It can be\\n\",\n    \"            adjusted even lower to control the expected time for normal\\n\",\n    \"            rebalances. Default: 3000\\n\",\n    \"        consumer_timeout_ms (int): maximum wait timeout for background fetching\\n\",\n    \"            routine. Mostly defines how fast the system will see rebalance and\\n\",\n    \"            request new data for new partitions. Default: 200\\n\",\n    \"        api_version (str): specify which kafka API version to use.\\n\",\n    \"            :class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\\n\",\n    \"            If set to ``auto``, will attempt to infer the broker version by\\n\",\n    \"            probing various APIs. Default: ``auto``\\n\",\n    \"        security_protocol (str): Protocol used to communicate with brokers.\\n\",\n    \"            Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\\n\",\n    \"        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\\n\",\n    \"            for wrapping socket connections. Directly passed into asyncio's\\n\",\n    \"            :meth:`~asyncio.loop.create_connection`. 
For more information see\\n\",\n    \"            :ref:`ssl_auth`. Default: None.\\n\",\n    \"        exclude_internal_topics (bool): Whether records from internal topics\\n\",\n    \"            (such as offsets) should be exposed to the consumer. If set to True\\n\",\n    \"            the only way to receive records from an internal topic is\\n\",\n    \"            subscribing to it. Requires 0.10+ Default: True\\n\",\n    \"        connections_max_idle_ms (int): Close idle connections after the number\\n\",\n    \"            of milliseconds specified by this config. Specifying `None` will\\n\",\n    \"            disable idle checks. Default: 540000 (9 minutes).\\n\",\n    \"        isolation_level (str): Controls how to read messages written\\n\",\n    \"            transactionally.\\n\",\n    \"\\n\",\n    \"            If set to ``read_committed``, :meth:`.getmany` will only return\\n\",\n    \"            transactional messages which have been committed.\\n\",\n    \"            If set to ``read_uncommitted`` (the default), :meth:`.getmany` will\\n\",\n    \"            return all messages, even transactional messages which have been\\n\",\n    \"            aborted.\\n\",\n    \"\\n\",\n    \"            Non-transactional messages will be returned unconditionally in\\n\",\n    \"            either mode.\\n\",\n    \"\\n\",\n    \"            Messages will always be returned in offset order. Hence, in\\n\",\n    \"            `read_committed` mode, :meth:`.getmany` will only return\\n\",\n    \"            messages up to the last stable offset (LSO), which is the one less\\n\",\n    \"            than the offset of the first open transaction. 
In particular any\\n\",\n    \"            messages appearing after messages belonging to ongoing transactions\\n\",\n    \"            will be withheld until the relevant transaction has been completed.\\n\",\n    \"            As a result, `read_committed` consumers will not be able to read up\\n\",\n    \"            to the high watermark when there are in flight transactions.\\n\",\n    \"            Further, when in `read_committed` the seek_to_end method will\\n\",\n    \"            return the LSO. See method docs below. Default: ``read_uncommitted``\\n\",\n    \"        sasl_mechanism (str): Authentication mechanism when security_protocol\\n\",\n    \"            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\\n\",\n    \"            ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\\n\",\n    \"            ``OAUTHBEARER``.\\n\",\n    \"            Default: ``PLAIN``\\n\",\n    \"        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: None\\n\",\n    \"        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: None\\n\",\n    \"        sasl_oauth_token_provider (~aiokafka.abc.AbstractTokenProvider): OAuthBearer token provider instance. 
(See :mod:`kafka.oauth.abstract`).\\n\",\n    \"            Default: None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    consumer = AIOKafkaConsumer(topic, **kwargs)\\n\",\n    \"    await consumer.start()\\n\",\n    \"    try:\\n\",\n    \"        with tqdm(total=msgs_count, desc=f\\\"consuming from '{topic}'\\\") as pbar:\\n\",\n    \"            async for msg in consumer:\\n\",\n    \"                pbar.update(1)\\n\",\n    \"                if pbar.n >= pbar.total:\\n\",\n    \"                    break\\n\",\n    \"    finally:\\n\",\n    \"        await consumer.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"29cddabb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# print(combine_params(consumes_messages, AIOKafkaConsumer).__doc__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d33f1b9a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'test_consume_messages'})\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"27758f3f1c484200b92d0a71907d9563\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'test_consume_messages':   0%|          | 0/120000 [00:00<?, ?it/s]\"\n      ]\n     
},\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'test_consume_messages': 1}. \\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"85c976608f344a4bbfbb862447afa55f\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"consuming from 'test_consume_messages':   0%|          | 0/108000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 4073...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 4073 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 3701...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 3701 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async with ApacheKafkaBroker(topics=[\\\"test_consume_messages\\\"], listener_port=9992) as bootstrap_server:\\n\",\n    \"    async with anyio.create_task_group() as tg:\\n\",\n    \"        tg.start_soon(\\n\",\n    \"            lambda d: produce_messages(**d),\\n\",\n    \"            dict(\\n\",\n    \"                msgs=msgs,\\n\",\n    \"                topic=\\\"test_consume_messages\\\",\\n\",\n    \"                bootstrap_servers=bootstrap_server,\\n\",\n    \"            ),\\n\",\n    \"        )\\n\",\n    \"        tg.start_soon(\\n\",\n    \"            lambda d: consumes_messages(**d),\\n\",\n    \"            dict(\\n\",\n    \"                
msgs_count=int(len(msgs) * 0.9),\\n\",\n    \"                topic=\\\"test_consume_messages\\\",\\n\",\n    \"                bootstrap_servers=bootstrap_server,\\n\",\n    \"            ),\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"17d70e10\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@delegates(AIOKafkaConsumer)\\n\",\n    \"@delegates(AIOKafkaProducer, keep=True)\\n\",\n    \"async def produce_and_consume_messages(\\n\",\n    \"    *,\\n\",\n    \"    produce_topic: str,\\n\",\n    \"    consume_topic: str,\\n\",\n    \"    msgs: List[Any],\\n\",\n    \"    msgs_count: int,\\n\",\n    \"    **kwargs: Dict[str, Any],\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"produce_and_consume_messages\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        produce_topic: Topic name for producing messages\\n\",\n    \"        consume_topic: Topic name for consuming messages\\n\",\n    \"        msgs: a list of messages to produce\\n\",\n    \"        msgs_count: number of messages to consume before returning\\n\",\n    \"        bootstrap_servers (str, list(str)): a ``host[:port]`` string (or list of\\n\",\n    \"            ``host[:port]`` strings) that the consumer should contact to bootstrap\\n\",\n    \"            initial cluster metadata.\\n\",\n    \"\\n\",\n    \"            This does not have to be the full node list.\\n\",\n    \"            It just needs to have at least one broker that will respond to a\\n\",\n    \"            Metadata API Request. Default port is 9092. If no servers are\\n\",\n    \"            specified, will default to ``localhost:9092``.\\n\",\n    \"        client_id (str): a name for this client. 
This string is passed in\\n\",\n    \"            each request to servers and can be used to identify specific\\n\",\n    \"            server-side log entries that correspond to this client. Also\\n\",\n    \"            submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`\\n\",\n    \"            for logging with respect to consumer group administration. Default:\\n\",\n    \"            ``aiokafka-{version}``\\n\",\n    \"        group_id (str or None): name of the consumer group to join for dynamic\\n\",\n    \"            partition assignment (if enabled), and to use for fetching and\\n\",\n    \"            committing offsets. If None, auto-partition assignment (via\\n\",\n    \"            group coordinator) and offset commits are disabled.\\n\",\n    \"            Default: None\\n\",\n    \"        key_deserializer (Callable): Any callable that takes a\\n\",\n    \"            raw message key and returns a deserialized key.\\n\",\n    \"        value_deserializer (Callable, Optional): Any callable that takes a\\n\",\n    \"            raw message value and returns a deserialized value.\\n\",\n    \"        fetch_min_bytes (int): Minimum amount of data the server should\\n\",\n    \"            return for a fetch request, otherwise wait up to\\n\",\n    \"            `fetch_max_wait_ms` for more data to accumulate. Default: 1.\\n\",\n    \"        fetch_max_bytes (int): The maximum amount of data the server should\\n\",\n    \"            return for a fetch request. This is not an absolute maximum, if\\n\",\n    \"            the first message in the first non-empty partition of the fetch\\n\",\n    \"            is larger than this value, the message will still be returned\\n\",\n    \"            to ensure that the consumer can make progress. 
NOTE: consumer\\n\",\n    \"            performs fetches to multiple brokers in parallel so memory\\n\",\n    \"            usage will depend on the number of brokers containing\\n\",\n    \"            partitions for the topic.\\n\",\n    \"            Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).\\n\",\n    \"        fetch_max_wait_ms (int): The maximum amount of time in milliseconds\\n\",\n    \"            the server will block before answering the fetch request if\\n\",\n    \"            there isn't sufficient data to immediately satisfy the\\n\",\n    \"            requirement given by fetch_min_bytes. Default: 500.\\n\",\n    \"        max_partition_fetch_bytes (int): The maximum amount of data\\n\",\n    \"            per-partition the server will return. The maximum total memory\\n\",\n    \"            used for a request ``= #partitions * max_partition_fetch_bytes``.\\n\",\n    \"            This size must be at least as large as the maximum message size\\n\",\n    \"            the server allows or else it is possible for the producer to\\n\",\n    \"            send messages larger than the consumer can fetch. If that\\n\",\n    \"            happens, the consumer can get stuck trying to fetch a large\\n\",\n    \"            message on a certain partition. Default: 1048576.\\n\",\n    \"        max_poll_records (int): The maximum number of records returned in a\\n\",\n    \"            single call to :meth:`.getmany`. Defaults ``None``, no limit.\\n\",\n    \"        request_timeout_ms (int): Client request timeout in milliseconds.\\n\",\n    \"            Default: 40000.\\n\",\n    \"        retry_backoff_ms (int): Milliseconds to backoff when retrying on\\n\",\n    \"            errors. 
Default: 100.\\n\",\n    \"        auto_offset_reset (str): A policy for resetting offsets on\\n\",\n    \"            :exc:`.OffsetOutOfRangeError` errors: ``earliest`` will move to the oldest\\n\",\n    \"            available message, ``latest`` will move to the most recent, and\\n\",\n    \"            ``none`` will raise an exception so you can handle this case.\\n\",\n    \"            Default: ``latest``.\\n\",\n    \"        enable_auto_commit (bool): If true the consumer's offset will be\\n\",\n    \"            periodically committed in the background. Default: True.\\n\",\n    \"        auto_commit_interval_ms (int): milliseconds between automatic\\n\",\n    \"            offset commits, if enable_auto_commit is True. Default: 5000.\\n\",\n    \"        check_crcs (bool): Automatically check the CRC32 of the records\\n\",\n    \"            consumed. This ensures no on-the-wire or on-disk corruption to\\n\",\n    \"            the messages occurred. This check adds some overhead, so it may\\n\",\n    \"            be disabled in cases seeking extreme performance. Default: True\\n\",\n    \"        metadata_max_age_ms (int): The period of time in milliseconds after\\n\",\n    \"            which we force a refresh of metadata even if we haven't seen any\\n\",\n    \"            partition leadership changes to proactively discover any new\\n\",\n    \"            brokers or partitions. Default: 300000\\n\",\n    \"        partition_assignment_strategy (list): List of objects to use to\\n\",\n    \"            distribute partition ownership amongst consumer instances when\\n\",\n    \"            group management is used. This preference is implicit in the order\\n\",\n    \"            of the strategies in the list. When assignment strategy changes:\\n\",\n    \"            to support a change to the assignment strategy, new versions must\\n\",\n    \"            enable support both for the old assignment strategy and the new\\n\",\n    \"            one. 
The coordinator will choose the old assignment strategy until\\n\",\n    \"            all members have been updated. Then it will choose the new\\n\",\n    \"            strategy. Default: [:class:`.RoundRobinPartitionAssignor`]\\n\",\n    \"        max_poll_interval_ms (int): Maximum allowed time between calls to\\n\",\n    \"            consume messages (e.g., :meth:`.getmany`). If this interval\\n\",\n    \"            is exceeded the consumer is considered failed and the group will\\n\",\n    \"            rebalance in order to reassign the partitions to another consumer\\n\",\n    \"            group member. If API methods block waiting for messages, that time\\n\",\n    \"            does not count against this timeout. See `KIP-62`_ for more\\n\",\n    \"            information. Default 300000\\n\",\n    \"        rebalance_timeout_ms (int): The maximum time server will wait for this\\n\",\n    \"            consumer to rejoin the group in a case of rebalance. In Java client\\n\",\n    \"            this behaviour is bound to `max.poll.interval.ms` configuration,\\n\",\n    \"            but as ``aiokafka`` will rejoin the group in the background, we\\n\",\n    \"            decouple this setting to allow finer tuning by users that use\\n\",\n    \"            :class:`.ConsumerRebalanceListener` to delay rebalacing. Defaults\\n\",\n    \"            to ``session_timeout_ms``\\n\",\n    \"        session_timeout_ms (int): Client group session and failure detection\\n\",\n    \"            timeout. The consumer sends periodic heartbeats\\n\",\n    \"            (`heartbeat.interval.ms`) to indicate its liveness to the broker.\\n\",\n    \"            If no hearts are received by the broker for a group member within\\n\",\n    \"            the session timeout, the broker will remove the consumer from the\\n\",\n    \"            group and trigger a rebalance. 
The allowed range is configured with\\n\",\n    \"            the **broker** configuration properties\\n\",\n    \"            `group.min.session.timeout.ms` and `group.max.session.timeout.ms`.\\n\",\n    \"            Default: 10000\\n\",\n    \"        heartbeat_interval_ms (int): The expected time in milliseconds\\n\",\n    \"            between heartbeats to the consumer coordinator when using\\n\",\n    \"            Kafka's group management feature. Heartbeats are used to ensure\\n\",\n    \"            that the consumer's session stays active and to facilitate\\n\",\n    \"            rebalancing when new consumers join or leave the group. The\\n\",\n    \"            value must be set lower than `session_timeout_ms`, but typically\\n\",\n    \"            should be set no higher than 1/3 of that value. It can be\\n\",\n    \"            adjusted even lower to control the expected time for normal\\n\",\n    \"            rebalances. Default: 3000\\n\",\n    \"        consumer_timeout_ms (int): maximum wait timeout for background fetching\\n\",\n    \"            routine. Mostly defines how fast the system will see rebalance and\\n\",\n    \"            request new data for new partitions. Default: 200\\n\",\n    \"        api_version (str): specify which kafka API version to use.\\n\",\n    \"            :class:`AIOKafkaConsumer` supports Kafka API versions >=0.9 only.\\n\",\n    \"            If set to ``auto``, will attempt to infer the broker version by\\n\",\n    \"            probing various APIs. Default: ``auto``\\n\",\n    \"        security_protocol (str): Protocol used to communicate with brokers.\\n\",\n    \"            Valid values are: ``PLAINTEXT``, ``SSL``. Default: ``PLAINTEXT``.\\n\",\n    \"        ssl_context (ssl.SSLContext): pre-configured :class:`~ssl.SSLContext`\\n\",\n    \"            for wrapping socket connections. Directly passed into asyncio's\\n\",\n    \"            :meth:`~asyncio.loop.create_connection`. 
For more information see\\n\",\n    \"            :ref:`ssl_auth`. Default: None.\\n\",\n    \"        exclude_internal_topics (bool): Whether records from internal topics\\n\",\n    \"            (such as offsets) should be exposed to the consumer. If set to True\\n\",\n    \"            the only way to receive records from an internal topic is\\n\",\n    \"            subscribing to it. Requires 0.10+ Default: True\\n\",\n    \"        connections_max_idle_ms (int): Close idle connections after the number\\n\",\n    \"            of milliseconds specified by this config. Specifying `None` will\\n\",\n    \"            disable idle checks. Default: 540000 (9 minutes).\\n\",\n    \"        isolation_level (str): Controls how to read messages written\\n\",\n    \"            transactionally.\\n\",\n    \"\\n\",\n    \"            If set to ``read_committed``, :meth:`.getmany` will only return\\n\",\n    \"            transactional messages which have been committed.\\n\",\n    \"            If set to ``read_uncommitted`` (the default), :meth:`.getmany` will\\n\",\n    \"            return all messages, even transactional messages which have been\\n\",\n    \"            aborted.\\n\",\n    \"\\n\",\n    \"            Non-transactional messages will be returned unconditionally in\\n\",\n    \"            either mode.\\n\",\n    \"\\n\",\n    \"            Messages will always be returned in offset order. Hence, in\\n\",\n    \"            `read_committed` mode, :meth:`.getmany` will only return\\n\",\n    \"            messages up to the last stable offset (LSO), which is the one less\\n\",\n    \"            than the offset of the first open transaction. 
In particular any\\n\",\n    \"            messages appearing after messages belonging to ongoing transactions\\n\",\n    \"            will be withheld until the relevant transaction has been completed.\\n\",\n    \"            As a result, `read_committed` consumers will not be able to read up\\n\",\n    \"            to the high watermark when there are in flight transactions.\\n\",\n    \"            Further, when in `read_committed` the seek_to_end method will\\n\",\n    \"            return the LSO. See method docs below. Default: ``read_uncommitted``\\n\",\n    \"        sasl_mechanism (str): Authentication mechanism when security_protocol\\n\",\n    \"            is configured for ``SASL_PLAINTEXT`` or ``SASL_SSL``. Valid values are:\\n\",\n    \"            ``PLAIN``, ``GSSAPI``, ``SCRAM-SHA-256``, ``SCRAM-SHA-512``,\\n\",\n    \"            ``OAUTHBEARER``.\\n\",\n    \"            Default: ``PLAIN``\\n\",\n    \"        sasl_plain_username (str): username for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: None\\n\",\n    \"        sasl_plain_password (str): password for SASL ``PLAIN`` authentication.\\n\",\n    \"            Default: None\\n\",\n    \"        sasl_oauth_token_provider (~aiokafka.abc.AbstractTokenProvider): OAuthBearer token provider instance. (See :mod:`kafka.oauth.abstract`).\\n\",\n    \"            Default: None\\n\",\n    \"        key_serializer (Callable): used to convert user-supplied keys to bytes\\n\",\n    \"            If not :data:`None`, called as ``f(key),`` should return\\n\",\n    \"            :class:`bytes`.\\n\",\n    \"            Default: :data:`None`.\\n\",\n    \"        value_serializer (Callable): used to convert user-supplied message\\n\",\n    \"            values to :class:`bytes`. If not :data:`None`, called as\\n\",\n    \"            ``f(value)``, should return :class:`bytes`.\\n\",\n    \"            Default: :data:`None`.\\n\",\n    \"        acks (Any): one of ``0``, ``1``, ``all``. 
The number of acknowledgments\\n\",\n    \"            the producer requires the leader to have received before considering a\\n\",\n    \"            request complete. This controls the durability of records that are\\n\",\n    \"            sent. The following settings are common:\\n\",\n    \"\\n\",\n    \"            * ``0``: Producer will not wait for any acknowledgment from the server\\n\",\n    \"              at all. The message will immediately be added to the socket\\n\",\n    \"              buffer and considered sent. No guarantee can be made that the\\n\",\n    \"              server has received the record in this case, and the retries\\n\",\n    \"              configuration will not take effect (as the client won't\\n\",\n    \"              generally know of any failures). The offset given back for each\\n\",\n    \"              record will always be set to -1.\\n\",\n    \"            * ``1``: The broker leader will write the record to its local log but\\n\",\n    \"              will respond without awaiting full acknowledgement from all\\n\",\n    \"              followers. In this case should the leader fail immediately\\n\",\n    \"              after acknowledging the record but before the followers have\\n\",\n    \"              replicated it then the record will be lost.\\n\",\n    \"            * ``all``: The broker leader will wait for the full set of in-sync\\n\",\n    \"              replicas to acknowledge the record. This guarantees that the\\n\",\n    \"              record will not be lost as long as at least one in-sync replica\\n\",\n    \"              remains alive. This is the strongest available guarantee.\\n\",\n    \"\\n\",\n    \"            If unset, defaults to ``acks=1``. If `enable_idempotence` is\\n\",\n    \"            :data:`True` defaults to ``acks=all``\\n\",\n    \"        compression_type (str): The compression type for all data generated by\\n\",\n    \"            the producer. 
Valid values are ``gzip``, ``snappy``, ``lz4``, ``zstd``\\n\",\n    \"            or :data:`None`.\\n\",\n    \"            Compression is of full batches of data, so the efficacy of batching\\n\",\n    \"            will also impact the compression ratio (more batching means better\\n\",\n    \"            compression). Default: :data:`None`.\\n\",\n    \"        max_batch_size (int): Maximum size of buffered data per partition.\\n\",\n    \"            After this amount :meth:`send` coroutine will block until batch is\\n\",\n    \"            drained.\\n\",\n    \"            Default: 16384\\n\",\n    \"        linger_ms (int): The producer groups together any records that arrive\\n\",\n    \"            in between request transmissions into a single batched request.\\n\",\n    \"            Normally this occurs only under load when records arrive faster\\n\",\n    \"            than they can be sent out. However in some circumstances the client\\n\",\n    \"            may want to reduce the number of requests even under moderate load.\\n\",\n    \"            This setting accomplishes this by adding a small amount of\\n\",\n    \"            artificial delay; that is, if first request is processed faster,\\n\",\n    \"            than `linger_ms`, producer will wait ``linger_ms - process_time``.\\n\",\n    \"            Default: 0 (i.e. no delay).\\n\",\n    \"        partitioner (Callable): Callable used to determine which partition\\n\",\n    \"            each message is assigned to. 
Called (after key serialization):\\n\",\n    \"            ``partitioner(key_bytes, all_partitions, available_partitions)``.\\n\",\n    \"            The default partitioner implementation hashes each non-None key\\n\",\n    \"            using the same murmur2 algorithm as the Java client so that\\n\",\n    \"            messages with the same key are assigned to the same partition.\\n\",\n    \"            When a key is :data:`None`, the message is delivered to a random partition\\n\",\n    \"            (filtered to partitions with available leaders only, if possible).\\n\",\n    \"        max_request_size (int): The maximum size of a request. This is also\\n\",\n    \"            effectively a cap on the maximum record size. Note that the server\\n\",\n    \"            has its own cap on record size which may be different from this.\\n\",\n    \"            This setting will limit the number of record batches the producer\\n\",\n    \"            will send in a single request to avoid sending huge requests.\\n\",\n    \"            Default: 1048576.\\n\",\n    \"        enable_idempotence (bool): When set to :data:`True`, the producer will\\n\",\n    \"            ensure that exactly one copy of each message is written in the\\n\",\n    \"            stream. If :data:`False`, producer retries due to broker failures,\\n\",\n    \"            etc., may write duplicates of the retried message in the stream.\\n\",\n    \"            Note that enabling idempotence acks to set to ``all``. If it is not\\n\",\n    \"            explicitly set by the user it will be chosen. If incompatible\\n\",\n    \"            values are set, a :exc:`ValueError` will be thrown.\\n\",\n    \"            New in version 0.5.0.\\n\",\n    \"        sasl_oauth_token_provider (: class:`~aiokafka.abc.AbstractTokenProvider`):\\n\",\n    \"            OAuthBearer token provider instance. 
(See\\n\",\n    \"            :mod:`kafka.oauth.abstract`).\\n\",\n    \"            Default: :data:`None`\\n\",\n    \"        *topics (list(str)): optional list of topics to subscribe to. If not set,\\n\",\n    \"            call :meth:`.subscribe` or :meth:`.assign` before consuming records.\\n\",\n    \"            Passing topics directly is same as calling :meth:`.subscribe` API.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    async with anyio.create_task_group() as tg:\\n\",\n    \"        tg.start_soon(\\n\",\n    \"            lambda d: produce_messages(**d),\\n\",\n    \"            dict(msgs=msgs, topic=produce_topic, **kwargs),\\n\",\n    \"        )\\n\",\n    \"        tg.start_soon(\\n\",\n    \"            lambda d: consumes_messages(**d),\\n\",\n    \"            dict(\\n\",\n    \"                msgs_count=msgs_count,\\n\",\n    \"                topic=consume_topic,\\n\",\n    \"                **kwargs,\\n\",\n    \"            ),\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a5bae758\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# print(combine_params(combine_params(produce_and_consume_messages, AIOKafkaProducer), AIOKafkaConsumer).__doc__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a5f5f9c7\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] 
aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'produce_and_consume_messages'})\\n\",\n      \"[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'produce_and_consume_messages': 1}. \\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"036194e4ba554428911cd5ba1f585146\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"producing to 'produce_and_consume_messages':   0%|          | 0/120000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"37027e75a7a1462389ee8060129b144c\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"consuming from 'produce_and_consume_messages':   0%|          | 0/114000 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 5277...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 5277 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 4904...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 4904 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"produce_and_consume_messages\\\"], listener_port=9992\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    await produce_and_consume_messages(\\n\",\n    \"        produce_topic=\\\"produce_and_consume_messages\\\",\\n\",\n   
 \"        consume_topic=\\\"produce_and_consume_messages\\\",\\n\",\n    \"        msgs=msgs,\\n\",\n    \"        msgs_count=int(len(msgs) * 0.95),\\n\",\n    \"        bootstrap_servers=bootstrap_server,\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3a544d7b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_collapsible_admonition(\\n\",\n    \"    code_block: str, *, name: Optional[str] = None\\n\",\n    \") -> Markdown:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Generate a collapsible admonition containing a code block as an example.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        code_block: The code block to be included in the example.\\n\",\n    \"        name: Optional name or title for the example.\\n\",\n    \"            Default is None.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A Markdown object representing the collapsible admonition\\n\",\n    \"        with the provided code block.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    alt_name = \\\"\\\" if name is None else name\\n\",\n    \"    intro = f'This example contains the content of the file \\\"{alt_name}\\\":'\\n\",\n    \"    return Markdown(\\n\",\n    \"        f\\\"??? Example \\\\n\\\\n    {intro}\\\\n\\\\n\\\"\\n\",\n    \"        + textwrap.indent(f\\\"```python\\\\n{code_block}\\\\n```\\\", prefix=\\\"    \\\")\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2f3f3cbe\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"??? 
Example \\n\",\n       \"\\n\",\n       \"    This example contains the content of the file \\\"server.py\\\":\\n\",\n       \"\\n\",\n       \"    ```python\\n\",\n       \"    print('hello')\\n\",\n       \"    ```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"get_collapsible_admonition(\\\"print('hello')\\\", name=\\\"server.py\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2383b6b5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def source2markdown(o: Union[str, Callable[..., Any]]) -> Markdown:\\n\",\n    \"    \\\"\\\"\\\"Converts source code into Markdown for displaying it with Jupyter notebook\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        o: source code\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    s = inspect.getsource(o) if callable(o) else o\\n\",\n    \"    return Markdown(\\n\",\n    \"        f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"{s}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"30ecd10b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"def f():\\n\",\n       \"    pass\\n\",\n       \"\\n\",\n       \"```\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def f():\\n\",\n    \"    pass\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"source2markdown(f)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"317e3f0c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def wait_for_get_url(\\n\",\n    \"    url: str, timeout: Optional[int] = None, **kwargs: Dict[str, Any]\\n\",\n    \") -> aiohttp.ClientResponse:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Asynchronously wait for a GET request to a specified URL with an optional timeout.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        url: The URL to send the GET request to.\\n\",\n    \"        timeout: Optional maximum number of seconds to wait\\n\",\n    \"            for a response. If not provided, there is no timeout. Default is None.\\n\",\n    \"        **kwargs: Additional keyword arguments to be passed to the tqdm progress bar,\\n\",\n    \"            if a timeout is provided.\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The aiohttp.ClientResponse response object for the GET request.\\n\",\n    \"\\n\",\n    \"    Raises:\\n\",\n    \"        TimeoutError: If the timeout is reached and the URL couldn't be fetched within\\n\",\n    \"            the specified time.\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    t0 = datetime.now()\\n\",\n    \"    if timeout is not None:\\n\",\n    \"        pbar = tqdm(total=timeout, **kwargs)\\n\",\n    \"    try:\\n\",\n    \"        async with aiohttp.ClientSession() as session:\\n\",\n    \"            while True:\\n\",\n    \"                try:\\n\",\n    \"                    async with session.get(url) as response:\\n\",\n    \"                        if timeout is not None:\\n\",\n    \"                            pbar.update(pbar.total - pbar.n)\\n\",\n    \"                        return response\\n\",\n    \"                except aiohttp.ClientConnectorError as e:\\n\",\n    \"                    if timeout is not None:\\n\",\n    \"                        if pbar.total - pbar.n > 1:\\n\",\n    \"                      
      pbar.update(1)\\n\",\n    \"                    await asyncio.sleep(1)\\n\",\n    \"\\n\",\n    \"                if timeout is not None and datetime.now() - t0 >= timedelta(\\n\",\n    \"                    seconds=timeout\\n\",\n    \"                ):\\n\",\n    \"                    raise TimeoutError(\\n\",\n    \"                        f\\\"Could not fetch url '{url}' for more than {timeout} seconds\\\"\\n\",\n    \"                    )\\n\",\n    \"    finally:\\n\",\n    \"        if timeout is not None:\\n\",\n    \"            pbar.close()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6f284dbc\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"62d27726c5ae46cfbf655d30826dff78\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"should pass:   0%|          | 0/5 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"application/vnd.jupyter.widget-view+json\": {\n       \"model_id\": \"4f2e4486bcb94cbdaf3eb374e613a8b7\",\n       \"version_major\": 2,\n       \"version_minor\": 0\n      },\n      \"text/plain\": [\n       \"expected to fail:   0%|          | 0/5 [00:00<?, ?it/s]\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<ExceptionInfo TimeoutError(\\\"Could not fetch url 'https://0.0.0.0:4000' for more than 5 seconds\\\") tblen=2>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"await wait_for_get_url(\\\"https://python.org\\\", timeout=5, desc=\\\"should pass\\\")\\n\",\n    \"\\n\",\n    \"with pytest.raises(TimeoutError) as e:\\n\",\n    \"    await 
wait_for_get_url(\\\"https://0.0.0.0:4000\\\", timeout=5, desc=\\\"expected to fail\\\")\\n\",\n    \"e\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a54b935c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"17ebec05\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/Logger.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e740dfbe\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | default_exp _components.logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"62377675\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"8efd915a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"import logging\\n\",\n    \"import logging.config\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import true_after\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ab182545\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | include: false\\n\",\n    \"\\n\",\n    \"import time\\n\",\n    \"import unittest\\n\",\n    \"\\n\",\n    \"import pytest\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4a4dfedf\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"# Logger Levels\\n\",\n    \"# CRITICAL = 50\\n\",\n    \"# ERROR = 40\\n\",\n    \"# WARNING = 30\\n\",\n    \"# INFO = 20\\n\",\n    \"# DEBUG = 10\\n\",\n    \"# NOTSET = 0\\n\",\n    \"\\n\",\n    \"should_suppress_timestamps: bool = False\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def suppress_timestamps(flag: bool = True) -> None:\\n\",\n    \"    \\\"\\\"\\\"Suppress logger timestamp\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        flag: If not set, then the default value **True** will be used to suppress the timestamp\\n\",\n    \"            from the logger messages\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    global should_suppress_timestamps\\n\",\n    \"    should_suppress_timestamps = flag\\n\",\n    \"\\n\",\n    
\"\\n\",\n    \"def get_default_logger_configuration(level: int = logging.INFO) -> Dict[str, Any]:\\n\",\n    \"    \\\"\\\"\\\"Return the common configurations for the logger\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        level: Logger level to set\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        A dict with default logger configuration\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    global should_suppress_timestamps\\n\",\n    \"\\n\",\n    \"    if should_suppress_timestamps:\\n\",\n    \"        FORMAT = \\\"[%(levelname)s] %(name)s: %(message)s\\\"\\n\",\n    \"    else:\\n\",\n    \"        FORMAT = \\\"%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s\\\"\\n\",\n    \"\\n\",\n    \"    DATE_FMT = \\\"%y-%m-%d %H:%M:%S\\\"\\n\",\n    \"\\n\",\n    \"    LOGGING_CONFIG = {\\n\",\n    \"        \\\"version\\\": 1,\\n\",\n    \"        \\\"disable_existing_loggers\\\": False,\\n\",\n    \"        \\\"formatters\\\": {\\n\",\n    \"            \\\"standard\\\": {\\\"format\\\": FORMAT, \\\"datefmt\\\": DATE_FMT},\\n\",\n    \"        },\\n\",\n    \"        \\\"handlers\\\": {\\n\",\n    \"            \\\"default\\\": {\\n\",\n    \"                \\\"level\\\": level,\\n\",\n    \"                \\\"formatter\\\": \\\"standard\\\",\\n\",\n    \"                \\\"class\\\": \\\"logging.StreamHandler\\\",\\n\",\n    \"                \\\"stream\\\": \\\"ext://sys.stdout\\\",  # Default is stderr\\n\",\n    \"            },\\n\",\n    \"        },\\n\",\n    \"        \\\"loggers\\\": {\\n\",\n    \"            \\\"\\\": {\\\"handlers\\\": [\\\"default\\\"], \\\"level\\\": level},  # root logger\\n\",\n    \"        },\\n\",\n    \"    }\\n\",\n    \"    return LOGGING_CONFIG\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"251df829\",\n   \"metadata\": {},\n   \"source\": [\n    \"Example on how to use **get_default_logger_configuration** function\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n 
  \"execution_count\": null,\n   \"id\": \"6e725745\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'version': 1,\\n\",\n       \" 'disable_existing_loggers': False,\\n\",\n       \" 'formatters': {'standard': {'format': '%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s',\\n\",\n       \"   'datefmt': '%y-%m-%d %H:%M:%S'}},\\n\",\n       \" 'handlers': {'default': {'level': 20,\\n\",\n       \"   'formatter': 'standard',\\n\",\n       \"   'class': 'logging.StreamHandler',\\n\",\n       \"   'stream': 'ext://sys.stdout'}},\\n\",\n       \" 'loggers': {'': {'handlers': ['default'], 'level': 20}}}\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# collapse_output\\n\",\n    \"\\n\",\n    \"get_default_logger_configuration()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"718c810d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'version': 1,\\n\",\n       \" 'disable_existing_loggers': False,\\n\",\n       \" 'formatters': {'standard': {'format': '%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s',\\n\",\n       \"   'datefmt': '%y-%m-%d %H:%M:%S'}},\\n\",\n       \" 'handlers': {'default': {'level': 20,\\n\",\n       \"   'formatter': 'standard',\\n\",\n       \"   'class': 'logging.StreamHandler',\\n\",\n       \"   'stream': 'ext://sys.stdout'}},\\n\",\n       \" 'loggers': {'': {'handlers': ['default'], 'level': 20}}}\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | include: false\\n\",\n    \"\\n\",\n    \"expected = {\\n\",\n    \"    \\\"version\\\": 1,\\n\",\n    \"    \\\"disable_existing_loggers\\\": False,\\n\",\n    \"    \\\"formatters\\\": {\\n\",\n    \"  
      \\\"standard\\\": {\\n\",\n    \"            \\\"format\\\": \\\"%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s\\\",\\n\",\n    \"            \\\"datefmt\\\": \\\"%y-%m-%d %H:%M:%S\\\",\\n\",\n    \"        }\\n\",\n    \"    },\\n\",\n    \"    \\\"handlers\\\": {\\n\",\n    \"        \\\"default\\\": {\\n\",\n    \"            \\\"level\\\": 20,\\n\",\n    \"            \\\"formatter\\\": \\\"standard\\\",\\n\",\n    \"            \\\"class\\\": \\\"logging.StreamHandler\\\",\\n\",\n    \"            \\\"stream\\\": \\\"ext://sys.stdout\\\",\\n\",\n    \"        }\\n\",\n    \"    },\\n\",\n    \"    \\\"loggers\\\": {\\\"\\\": {\\\"handlers\\\": [\\\"default\\\"], \\\"level\\\": 20}},\\n\",\n    \"}\\n\",\n    \"actual = get_default_logger_configuration()\\n\",\n    \"assert actual == expected\\n\",\n    \"actual\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f19186ba\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"logger_spaces_added: List[str] = []\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_logger(\\n\",\n    \"    name: str, *, level: int = logging.INFO, add_spaces: bool = True\\n\",\n    \") -> logging.Logger:\\n\",\n    \"    \\\"\\\"\\\"Return the logger class with default logging configuration.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        name: Pass the __name__ variable as name while calling\\n\",\n    \"        level: Used to configure logging, default value `logging.INFO` logs\\n\",\n    \"            info messages and up.\\n\",\n    \"        add_spaces:\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        The logging.Logger class with default/custom logging configuration\\n\",\n    \"\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    config = get_default_logger_configuration(level=level)\\n\",\n    \"    logging.config.dictConfig(config)\\n\",\n    \"\\n\",\n    \"    logger = 
logging.getLogger(name)\\n\",\n    \"    #     stack_size = len(traceback.extract_stack())\\n\",\n    \"    #     def add_spaces_f(f):\\n\",\n    \"    #         def f_with_spaces(msg, *args, **kwargs):\\n\",\n    \"    #             cur_stack_size = len(traceback.extract_stack())\\n\",\n    \"    #             msg = \\\" \\\"*(cur_stack_size-stack_size)*2 + msg\\n\",\n    \"    #             return f(msg, *args, **kwargs)\\n\",\n    \"    #         return f_with_spaces\\n\",\n    \"\\n\",\n    \"    #     if name not in logger_spaces_added and add_spaces:\\n\",\n    \"    #         logger.debug = add_spaces_f(logger.debug) # type: ignore\\n\",\n    \"    #         logger.info = add_spaces_f(logger.info) # type: ignore\\n\",\n    \"    #         logger.warning = add_spaces_f(logger.warning) # type: ignore\\n\",\n    \"    #         logger.error = add_spaces_f(logger.error) # type: ignore\\n\",\n    \"    #         logger.critical = add_spaces_f(logger.critical) # type: ignore\\n\",\n    \"    #         logger.exception = add_spaces_f(logger.exception) # type: ignore\\n\",\n    \"\\n\",\n    \"    #         logger_spaces_added.append(name)\\n\",\n    \"\\n\",\n    \"    return logger\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7fea37e9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | include: false\\n\",\n    \"\\n\",\n    \"assert type(get_logger(__name__)) == logging.Logger\\n\",\n    \"\\n\",\n    \"with pytest.raises(TypeError) as e:\\n\",\n    \"    get_logger()\\n\",\n    \"assert \\\"missing 1 required positional argument\\\" in str(e.value)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3c6a2ae0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-08-08 04:32:22.060 [INFO] __main__: hello\\n\",\n      \"23-08-08 04:32:22.061 [INFO] __main__: 
hello\\n\",\n      \"23-08-08 04:32:22.061 [INFO] __main__: hello\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"logger = get_logger(__name__)\\n\",\n    \"logger.info(\\\"hello\\\")\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"logger.info(\\\"hello\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def f():\\n\",\n    \"    logger.info(\\\"hello\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"f()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"390214db\",\n   \"metadata\": {},\n   \"source\": [\n    \"Example on how to use **get_logger** function\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"90767ccb\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-08-08 04:32:22.067 [INFO] __main__: info\\n\",\n      \"23-08-08 04:32:22.067 [WARNING] __main__: Warning\\n\",\n      \"23-08-08 04:32:22.068 [ERROR] __main__: Error\\n\",\n      \"23-08-08 04:32:22.068 [CRITICAL] __main__: Critical\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# collapse_output\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"\\n\",\n    \"logger.debug(\\\"Debug\\\")\\n\",\n    \"logger.info(\\\"info\\\")\\n\",\n    \"logger.warning(\\\"Warning\\\")\\n\",\n    \"logger.error(\\\"Error\\\")\\n\",\n    \"logger.critical(\\\"Critical\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ede2ce1f\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: info\\n\",\n      \"[WARNING] __main__: Warning\\n\",\n      \"[ERROR] __main__: Error\\n\",\n      \"[CRITICAL] __main__: Critical\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# collapse_output\\n\",\n    \"\\n\",\n    \"suppress_timestamps()\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"\\n\",\n    
\"logger.debug(\\\"Debug\\\")\\n\",\n    \"logger.info(\\\"info\\\")\\n\",\n    \"logger.warning(\\\"Warning\\\")\\n\",\n    \"logger.error(\\\"Error\\\")\\n\",\n    \"logger.critical(\\\"Critical\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1e791150\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def set_level(level: int) -> None:\\n\",\n    \"    \\\"\\\"\\\"Set logger level\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        level: Logger level to set\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"    # Getting all loggers that has either fastkafka or __main__ in the name\\n\",\n    \"    loggers = [\\n\",\n    \"        logging.getLogger(name)\\n\",\n    \"        for name in logging.root.manager.loggerDict\\n\",\n    \"        if (\\\"fastkafka\\\" in name) or (\\\"__main__\\\" in name)\\n\",\n    \"    ]\\n\",\n    \"\\n\",\n    \"    for logger in loggers:\\n\",\n    \"        logger.setLevel(level)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5db8d01f\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"40\\n\",\n      \"[ERROR] __main__: This is an error\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"level = logging.ERROR\\n\",\n    \"\\n\",\n    \"set_level(level)\\n\",\n    \"\\n\",\n    \"# Checking if the logger is set back to logging.WARNING in dev mode\\n\",\n    \"print(logger.getEffectiveLevel())\\n\",\n    \"assert logger.getEffectiveLevel() == level\\n\",\n    \"\\n\",\n    \"logger.debug(\\\"This is a debug message\\\")\\n\",\n    \"logger.info(\\\"This is an info\\\")\\n\",\n    \"logger.warning(\\\"This is a warning\\\")\\n\",\n    \"logger.error(\\\"This is an error\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"b9c2d1b0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] __main__: something\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Reset log level back to info\\n\",\n    \"level = logging.INFO\\n\",\n    \"\\n\",\n    \"set_level(level)\\n\",\n    \"logger.info(\\\"something\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ce1b40e5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"int\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"type(logging.INFO)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"04cf2ffb\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | export\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def cached_log(\\n\",\n    \"    self: logging.Logger,\\n\",\n    \"    msg: str,\\n\",\n    \"    level: int,\\n\",\n    \"    timeout: Union[int, float] = 5,\\n\",\n    \"    log_id: Optional[str] = None,\\n\",\n    \") -> None:\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    Logs a message with a specified level only once within a given timeout.\\n\",\n    \"\\n\",\n    \"    Args:\\n\",\n    \"        self: The logger instance.\\n\",\n    \"        msg: The message to log.\\n\",\n    \"        level: The logging level for the message.\\n\",\n    \"        timeout: The timeout duration in seconds.\\n\",\n    \"        log_id: Id of the log to timeout for timeout time, if None, msg will be used as log_id\\n\",\n    \"\\n\",\n    \"    Returns:\\n\",\n    \"        None\\n\",\n    \"    \\\"\\\"\\\"\\n\",\n    \"    if not hasattr(self, \\\"_timeouted_msgs\\\"):\\n\",\n    \"        self._timeouted_msgs = {}  # type: ignore\\n\",\n    \"        \\n\",\n    \"    key = msg if log_id is 
None else log_id\n",\n    "\n",\n    "    if key not in self._timeouted_msgs or self._timeouted_msgs[key]():  # type: ignore\n",\n    "        self._timeouted_msgs[key] = true_after(timeout)  # type: ignore\n",\n    "\n",\n    "        self.log(level, msg)"\n   ]\n  },\n  {\n   "cell_type": "code",\n   "execution_count": null,\n   "id": "65185f0f",\n   "metadata": {},\n   "outputs": [],\n   "source": [\n    "with unittest.mock.patch(\"logging.Logger.log\") as mock:\n",\n    "    for i in range(3 * 5 - 2):\n",\n    "        cached_log(logger, \"log me!\", level=logging.INFO, timeout=1)\n",\n    "        time.sleep(0.2)\n",\n    "\n",\n    "    assert mock.call_args_list == [unittest.mock.call(20, \"log me!\")] * 3"\n   ]\n  }\n ],\n "metadata": {\n  "kernelspec": {\n   "display_name": "python3",\n   "language": "python",\n   "name": "python3"\n  }\n },\n "nbformat": 4,\n "nbformat_minor": 5\n}\n"
  },
  {
    "path": "nbs/_quarto.yml",
    "content": "project:\n  type: website\n\nformat:\n  html:\n    theme: cosmo\n    css: styles.css\n    toc: true\n\nwebsite:\n  twitter-card: true\n  open-graph: true\n  repo-actions: [issue]\n  navbar:\n    background: primary\n    search: true\n  sidebar:\n    style: floating\n\nmetadata-files: [nbdev.yml, sidebar.yml]\n"
  },
  {
    "path": "nbs/guides/.gitignore",
    "content": "application.py\nfast_apps.py\napplication_test.py \n"
  },
  {
    "path": "nbs/guides/Guide_00_FastKafka_Demo.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# FastKafka tutorial\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use Python library for building asynchronous services that interact with Kafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/), [AIOKafka](https://github.com/aio-libs/aiokafka) and [AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process of writing producers and consumers for Kafka topics, handling all the parsing, networking, task scheduling and data generation automatically. With FastKafka, you can quickly prototype and develop high-performance Kafka-based services with minimal code, making it an ideal choice for developers looking to streamline their workflow and accelerate their projects.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Install\\n\",\n    \"\\n\",\n    \"FastKafka works on macOS, Linux, and most Unix-style operating systems. You can install it with `pip` as usual:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"pip install fastkafka\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"try:\\n\",\n    \"    import fastkafka\\n\",\n    \"except:\\n\",\n    \"    ! 
pip install fastkafka"\n   ]\n  },\n  {\n   "cell_type": "markdown",\n   "metadata": {},\n   "source": [\n    "## Running in Colab\n",\n    "\n",\n    "You can start this interactive tutorial in Google Colab by clicking the button below:\n",\n    "\n",\n    "<a href=\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb\" target=\"_blank\">\n",\n    "  <img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\" />\n",\n    "</a>"\n   ]\n  },\n  {\n   "cell_type": "markdown",\n   "metadata": {},\n   "source": [\n    "## Writing server code\n",\n    "\n",\n    "Here is an example python script using FastKafka that takes data from a Kafka topic, makes a prediction using a predictive model, and outputs the prediction to another Kafka topic."\n   ]\n  },\n  {\n   "cell_type": "markdown",\n   "metadata": {},\n   "source": [\n    "### Preparing the demo model\n",\n    "\n",\n    "First we will prepare our model using the Iris dataset so that we can demonstrate the predictions using FastKafka. 
The following call downloads the dataset and trains the model.\\n\",\n    \"\\n\",\n    \"We will wrap the model creation into a lifespan of our app so that the model is created just before the app is started.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Messages\\n\",\n    \"\\n\",\n    \"FastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input JSON-encoded data into Python objects, making it easy to work with structured data in your Kafka-based applications. Pydantic's [`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you to define messages using a declarative syntax, making it easy to specify the fields and types of your messages.\\n\",\n    \"\\n\",\n    \"This example defines two message classes for use in a FastKafka application:\\n\",\n    \"\\n\",\n    \"- The `IrisInputData` class is used to represent input data for a predictive model. 
It has four fields of type [`NonNegativeFloat`](https://docs.pydantic.dev/usage/types/#constrained-types), which is a subclass of float that only allows non-negative floating point values.\\n\",\n    \"\\n\",\n    \"- The `IrisPrediction` class is used to represent the output of the predictive model. It has a single field `species` of type string representing the predicted species.\\n\",\n    \"\\n\",\n    \"These message classes will be used to parse and validate incoming data in Kafka consumers and producers.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Application\\n\",\n    \"\\n\",\n    \"This example shows how to initialize a FastKafka application.\\n\",\n    \"\\n\",\n    \"It starts by defining  a dictionary called `kafka_brokers`, which contains two entries: `\\\"localhost\\\"` and `\\\"production\\\"`, specifying local development and production Kafka 
brokers. Each entry specifies the URL, port, and other details of a Kafka broker. This dictionary is used for generating the documentation only and it is not being checked by the actual server.\\n\",\n    \"\\n\",\n    \"Next, an object of the `FastKafka` class is initialized with the minimum set of arguments:\\n\",\n    \"\\n\",\n    \"- `kafka_brokers`: a dictionary used for generation of documentation\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Function decorators\\n\",\n    \"\\n\",\n    \"FastKafka provides convenient function decorators `@kafka_app.consumes` and `@kafka_app.produces` to allow you to delegate the actual process of\\n\",\n    \"\\n\",\n    \"- consuming and producing data to Kafka, and\\n\",\n    \"\\n\",\n    \"- decoding and encoding JSON encode messages\\n\",\n    \"\\n\",\n    \"from user defined functions to the framework. 
The FastKafka framework delegates these jobs to AIOKafka and Pydantic libraries.\n",\n    "\n",\n    "These decorators make it easy to specify the processing logic for your Kafka consumers and producers, allowing you to focus on the core business logic of your application without worrying about the underlying Kafka integration."\n   ]\n  },\n  {\n   "cell_type": "markdown",\n   "metadata": {},\n   "source": [\n    "The following example shows how to use the `@kafka_app.consumes` and `@kafka_app.produces` decorators in a FastKafka application:\n",\n    "\n",\n    "- The `@kafka_app.consumes` decorator is applied to the `on_input_data` function, which specifies that this function should be called whenever a message is received on the \"input_data\" Kafka topic. The `on_input_data` function takes a single argument which is expected to be an instance of the `IrisInputData` message class. Specifying the type of the single argument is instructing the Pydantic to use `IrisInputData.parse_raw()` on the consumed message before passing it to the user defined function `on_input_data`.\n",\n    "\n",\n    "- The `@produces` decorator is applied to the `to_predictions` function, which specifies that this function should produce a message to the \"predictions\" Kafka topic whenever it is called. The `to_predictions` function takes a single integer argument `species_class` representing one of three possible string values predicted by the model. It creates a new `IrisPrediction` message using this value and then returns it. 
The framework will call the `IrisPrediction.json().encode(\\\"utf-8\\\")` function on the returned value and produce it to the specified topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Testing the service\\n\",\n    \"\\n\",\n    \"The service can be tested using the `Tester` instances which internally starts Kafka broker and zookeeper.\\n\",\n    \"\\n\",\n    \"Before running tests, we have to install Java runtime and Apache Kafka locally. 
To simplify the process, we provide the following convenience command:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka testing install_deps\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\r\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\r\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\r\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\r\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! fastkafka testing install_deps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\r\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\r\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\r\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\r\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! 
fastkafka testing install_deps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer 
patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"msg = IrisInputData(\\n\",\n    \"    sepal_length=0.1,\\n\",\n    \"    sepal_width=0.2,\\n\",\n    \"    petal_length=0.3,\\n\",\n    \"    petal_width=0.4,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"# Start Tester app and create local Kafka broker for testing\\n\",\n    \"async with Tester(kafka_app) as tester:\\n\",\n    \"    # Send IrisInputData message to input_data topic\\n\",\n    \"    await tester.to_input_data(msg)\\n\",\n    \"\\n\",\n    \"    # Assert that the kafka_app responded with IrisPrediction in predictions topic\\n\",\n    \"    await tester.awaited_mocks.on_predictions.assert_awaited_with(\\n\",\n    \"        IrisPrediction(species=\\\"setosa\\\"), timeout=2\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Recap\\n\",\n    \"\\n\",\n    \"We have created a Iris classification model and encapulated it into our fastkafka application.\\n\",\n    \"The app will consume the IrisInputData from the `input_data` topic and produce the predictions to `predictions` topic.\\n\",\n    \"\\n\",\n    \"To test the app we have:\\n\",\n    \"\\n\",\n    \"1. Created the app\\n\",\n    \"\\n\",\n    \"2. Started our Tester class which mirrors the developed app topics for testing purpuoses\\n\",\n    \"\\n\",\n    \"3. Sent IrisInputData message to `input_data` topic\\n\",\n    \"\\n\",\n    \"4. 
Asserted and checked that the developed iris classification service has reacted to IrisInputData message \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Running the service\\n\",\n    \"\\n\",\n    \"The service can be started using builtin `faskafka run` CLI command. Before we can do that, we will concatenate the code snippets from above and save them in a file `\\\"application.py\\\"`\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"# content of the \\\"application.py\\\" file\\n\",\n       \"\\n\",\n       \"from contextlib import asynccontextmanager\\n\",\n       \"\\n\",\n       \"from sklearn.datasets import load_iris\\n\",\n       \"from sklearn.linear_model import LogisticRegression\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"ml_models = {}\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@asynccontextmanager\\n\",\n       \"async def lifespan(app: FastKafka):\\n\",\n       \"    # Load the ML model\\n\",\n       \"    X, y = load_iris(return_X_y=True)\\n\",\n       \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n       \"        X, y\\n\",\n       \"    )\\n\",\n       \"    yield\\n\",\n       \"    # Clean up the ML models and release the resources\\n\",\n       \"    ml_models.clear()\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n       \"\\n\",\n       \"class IrisInputData(BaseModel):\\n\",\n       \"    sepal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    sepal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, 
description=\\\"Sepal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"class IrisPrediction(BaseModel):\\n\",\n       \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n       \"    \\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"    },\\n\",\n       \"    \\\"production\\\": {\\n\",\n       \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n       \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n       \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Iris predictions\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \"    lifespan=lifespan,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n       \"async def on_input_data(msg: IrisInputData):\\n\",\n       \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n       \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n       \"    )[0]\\n\",\n       \"\\n\",\n       \"    
await to_predictions(species_class)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n       \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",
       ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"    \\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    
\"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"with open(\\\"application.py\\\", \\\"w\\\") as source:\\n\",\n    \"    source.write(kafka_app_source)\\n\",\n    \"\\n\",\n    \"Markdown(\\n\",\n    \"    f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file\\n\",\n    \"{kafka_app_source}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following 
parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"from application import kafka_app\\n\",\n    \"\\n\",\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"msg = IrisInputData(\\n\",\n    \"    sepal_length=0.1,\\n\",\n    \"    sepal_width=0.2,\\n\",\n    \"    petal_length=0.3,\\n\",\n    \"    petal_width=0.4,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"# Start Tester app and create InMemory Kafka broker for testing - application.py\\n\",\n    \"async with Tester(kafka_app) as tester:\\n\",\n    \"    # Send IrisInputData message to input_data topic - application.py\\n\",\n    \"    await tester.to_input_data(msg)\\n\",\n    \"\\n\",\n    \"    # Assert that the kafka_app responded with IrisPrediction in predictions topic - application.py\\n\",\n    \"    await tester.awaited_mocks.on_predictions.assert_awaited_with(\\n\",\n    \"        IrisPrediction(species=\\\"setosa\\\"), timeout=30\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"To run the service, 
you will need a running Kafka broker on localhost as specified in the `kafka_brokers` parameter above. We can start the Kafka broker locally using the `ApacheKafkaBroker`. Notice that the same happens automatically in the `Tester` as shown above.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"'127.0.0.1:9092'\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# 
| notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker\\n\",\n    \"\\n\",\n    \"broker = ApacheKafkaBroker(apply_nest_asyncio=True)\\n\",\n    \"\\n\",\n    \"broker.start()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"Then, we start the FastKafka service by running the following command in the folder where the `application.py` file is located:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"In the above command, we use `--num-workers` option to specify how many workers to launch and we use `--kafka-broker` option to specify which kafka broker configuration to use from earlier specified `kafka_brokers`\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\\n\",\n      \"[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\\n\",\n      \"[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\\n\",\n      \"[1200656]: [INFO] 
aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\\n\",\n      \"[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\\n\",\n      \"[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\\n\",\n      \"[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \\n\",\n      \"[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\\n\",\n      \"[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\\n\",\n      \"[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\\n\",\n      \"[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\\n\",\n      \"[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\\n\",\n      \"[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\\n\",\n      \"[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\\n\",\n      \"[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\\n\",\n      \"[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\\n\",\n      \"^C\\n\",\n      \"[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\\n\",\n      \"[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\\n\",\n      \"[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"!fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"You need to interupt running of the cell above by selecting `Runtime->Interupt execution` on the toolbar above.\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"Finally, we can stop the local Kafka Broker:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"broker.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Documentation\\n\",\n    \"\\n\",\n    \"The kafka app comes with builtin documentation generation using [AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\\n\",\n    \"\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"When running in Colab, we need to update Node.js first:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"try:\\n\",\n    \"    import google.colab\\n\",\n    \"\\n\",\n    \"    !npm install -g n\\n\",\n    \"    !n lts\\n\",\n    \"except:\\n\",\n    \"    pass\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"We need to install all dependancies for the generator using the following command line:\\n\",\n    \"```sh\\n\",\n    \"fastkafka docs install_deps\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\\r\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! fastkafka docs install_deps\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"To generate the documentation programatically you just need to call the folloving command:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka docs generate application:kafka_app\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! ✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/work/fastkafka/nbs/guides/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! 
fastkafka docs generate application:kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \". This will generate the *asyncapi* folder in relative path where all your documentation will be saved. You can check out the content of it with:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"ls -l asyncapi\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"total 8\\r\\n\",\n      \"drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\\r\\n\",\n      \"drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\\r\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! ls -l asyncapi\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"In docs folder you will find the servable static html file of your documentation. This can also be served using our `fastkafka docs serve` CLI command (more on that in our guides).\\n\",\n    \"\\n\",\n    \"In spec folder you will find a asyncapi.yml file containing the async API specification of your application. 
\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"We can locally preview the generated documentation by running the following command:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka docs serve application:kafka_app\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\\n\",\n      \"[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! ✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/work/fastkafka/nbs/guides/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"Serving documentation on http://127.0.0.1:8000\\u001b[0m\\n\",\n      \"^C\\n\",\n      \"Interupting serving of documentation and cleaning up...\\u001b[0m\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"!fastkafka docs serve application:kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"From the parameters passed to the application constructor, we get the documentation bellow:\\n\",\n    \"```python\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \" 
       \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    bootstrap_servers=\\\"localhost:9092\\\",\\n\",\n    \")\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"The following documentation snippet are for the consumer as specified in the code above:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"\\n\",\n    \"![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"The following documentation snippet are for the producer as specified in the code above:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"\\n\",\n    \"![Kafka_producer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"Finally, all messages as defined as subclasses of *BaseModel* are documented as well: \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    
\"\\n\",\n    \"![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 1\n}\n"
  },
  {
    "path": "nbs/guides/Guide_01_Intro.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f48d0afd\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Intro\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8c6faae1\",\n   \"metadata\": {},\n   \"source\": [\n    \"This tutorial will show you how to use <b>FastKafkaAPI</b>, step by step.\\n\",\n    \"\\n\",\n    \"The goal of FastKafkaAPI is to simplify the use of Apache Kafka in Python inspired by FastAPI look and feel.\\n\",\n    \"\\n\",\n    \"In this Intro tutorial we'll go trough the basic requirements to run the demos presented in future steps.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3943a6d2\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Installing FastKafkaAPI\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a04caa3f\",\n   \"metadata\": {},\n   \"source\": [\n    \"First step is to install FastKafkaAPI\\n\",\n    \"\\n\",\n    \"```shell\\n\",\n    \"$ pip install fastkafka\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e99120de\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Preparing a Kafka broker\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9019cf73\",\n   \"metadata\": {},\n   \"source\": [\n    \"Next step is to prepare the Kafka environment, our consumers and producers will need some channel of communication.\\n\",\n    \"\\n\",\n    \"!!! info \\\\\\\"Hey, your first info!\\\\\\\"\\n\",\n    \"\\n\",\n    \"    If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \\n\",\n    \"\\n\",\n    \"To go through the tutorial, we recommend that you use dockerized Kafka brokers, if you have Docker and docker-compose installed the setup should take you no time (if we exclude the container download times).\\n\",\n    \"\\n\",\n    \"!!! warning \\\\\\\"Listen! 
This is important.\\\\\\\"\\n\",\n    \"\\n\",\n    \"    To be able to setup this configuration you need to have Docker and docker-compose installed\\n\",\n    \"    \\n\",\n    \"    See here for more info on <a href = \\\\\\\"https://docs.docker.com/\\\\\\\" target=\\\\\\\"_blank\\\\\\\">Docker</a> and <a href = \\\\\\\"https://docs.docker.com/compose/install/\\\\\\\" target=\\\\\\\"_blank\\\\\\\">docker compose</a>\\n\",\n    \"\\n\",\n    \"To setup the recommended environment, first, create a new folder wher you want to save your demo files (e.g. fastkafka_demo).\\n\",\n    \"Inside the new folder create a new YAML file named <b>kafka_demo.yml</b> and copy the following configuration into it:\\n\",\n    \"\\n\",\n    \"``` yaml\\n\",\n    \"version: \\\"3\\\"\\n\",\n    \"services:\\n\",\n    \"    zookeeper:\\n\",\n    \"        image: wurstmeister/zookeeper\\n\",\n    \"        hostname: zookeeper\\n\",\n    \"        container_name: zookeeper\\n\",\n    \"        networks:\\n\",\n    \"          - fastkafka-network\\n\",\n    \"        ports:\\n\",\n    \"          - \\\"2181:2181\\\"\\n\",\n    \"          - \\\"22:22\\\"\\n\",\n    \"          - \\\"2888:2888\\\"\\n\",\n    \"          - \\\"3888:3888\\\"\\n\",\n    \"    kafka:\\n\",\n    \"        image: wurstmeister/kafka\\n\",\n    \"        container_name: kafka\\n\",\n    \"        ports:\\n\",\n    \"          - \\\"9093:9093\\\"\\n\",\n    \"        environment:\\n\",\n    \"            HOSTNAME_COMMAND: \\\"docker info | grep ^Name: | cut -d' ' -f 2\\\"\\n\",\n    \"            KAFKA_ZOOKEEPER_CONNECT: \\\"zookeeper:2181\\\"\\n\",\n    \"            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\\n\",\n    \"            KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\\n\",\n    \"            KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\\n\",\n    \"            KAFKA_INTER_BROKER_LISTENER_NAME: INTER\\n\",\n    \"            
KAFKA_CREATE_TOPICS: \\\"hello:1:1\\\"\\n\",\n    \"        volumes:\\n\",\n    \"            - /var/run/docker.sock:/var/run/docker.sock\\n\",\n    \"        depends_on:\\n\",\n    \"            - zookeeper\\n\",\n    \"        healthcheck:\\n\",\n    \"            test: [ \\\"CMD\\\", \\\"kafka-topics.sh\\\", \\\"--list\\\", \\\"--zookeeper\\\", \\\"zookeeper:2181\\\" ]\\n\",\n    \"            interval: 5s\\n\",\n    \"            timeout: 10s\\n\",\n    \"            retries: 5\\n\",\n    \"        networks:\\n\",\n    \"          - fastkafka-network\\n\",\n    \"networks:\\n\",\n    \"    fastkafka-network:\\n\",\n    \"        name: \\\"fastkafka-network\\\"\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This configuration will start a single instance of Zookeeper, single instance of Kafka broker and create a 'hello' topic (quite enough for a start).\\n\",\n    \"To start the configuration, run: \\n\",\n    \"```shell\\n\",\n    \"$ docker-compose -f kafka_demo.yml up -d --wait\\n\",\n    \"```\\n\",\n    \"This will start the necessary containers and wait till they report that they are Healthy. After the command finishes, you are good to go to try out the FastKafkaAPI capabilities! :confetti_ball:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4947af09\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Running the code\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3eb41e6f\",\n   \"metadata\": {},\n   \"source\": [\n    \"After installing FastKafkaAPI and initialising the Kafka broker you can proceed to the 'First Steps' part of the tutorial. There, you will write your first Kafka client and producer apps, run them, and interact with them.\\n\",\n    \"\\n\",\n    \"You are highly encouraged to follow along the tutorials not just by reading through them but by implementing the code examples in your own environment. 
This will not only help you remember the use cases better but also, hopefully, demonstrate to you the ease of use of this library.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f9d16a4c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_02_First_Steps.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f48d0afd\",\n   \"metadata\": {},\n   \"source\": [\n    \"# First Steps\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fd434418\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Creating a simple Kafka consumer app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1482b8b9\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import os\\n\",\n    \"import platform\\n\",\n    \"\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4a18cdd7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"# | notest\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"75545b26\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1073bb3b\",\n   \"metadata\": {},\n   \"source\": [\n    \"For our first demo we will create the simplest possible Kafka consumer and run it using 'fastkafka run' command.\\n\",\n    \"\\n\",\n    \"The consumer will:\\n\",\n    \"\\n\",\n    \"1. Connect to the Kafka Broker we setup in the Intro guide\\n\",\n    \"\\n\",\n    \"2. Listen to the hello topic\\n\",\n    \"\\n\",\n    \"3. 
Write any message received from the hello topic to stdout\\n\",\n    \"    \\n\",\n    \"To create the consumer, first, create a file named <b>hello_kafka_consumer.py</b> and copy the following code to it:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"825bf08a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"from os import environ\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"kafka_server_url = environ[\\\"KAFKA_HOSTNAME\\\"]\\n\",\n       \"kafka_server_port = environ[\\\"KAFKA_PORT\\\"]\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"description\\\": \\\"local development kafka\\\",\\n\",\n       \"        \\\"url\\\": kafka_server_url,\\n\",\n       \"        \\\"port\\\": kafka_server_port\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"class HelloKafkaMsg(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    kafka_brokers=kafka_brokers\\n\",\n       \")\\n\",\n       \"    \\n\",\n       \"@kafka_app.consumes()\\n\",\n       \"async def on_hello(msg: HelloKafkaMsg):\\n\",\n       \"    print(f\\\"Got data, msg={msg.msg}\\\", flush=True)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"consumer_script = 
\\\"\\\"\\\"\\n\",\n    \"from os import environ\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"kafka_server_url = environ[\\\"KAFKA_HOSTNAME\\\"]\\n\",\n    \"kafka_server_port = environ[\\\"KAFKA_PORT\\\"]\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"description\\\": \\\"local development kafka\\\",\\n\",\n    \"        \\\"url\\\": kafka_server_url,\\n\",\n    \"        \\\"port\\\": kafka_server_port\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"class HelloKafkaMsg(BaseModel):\\n\",\n    \"    msg: str = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"Hello\\\",\\n\",\n    \"        description=\\\"Demo hello world message\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    kafka_brokers=kafka_brokers\\n\",\n    \")\\n\",\n    \"    \\n\",\n    \"@kafka_app.consumes()\\n\",\n    \"async def on_hello(msg: HelloKafkaMsg):\\n\",\n    \"    print(f\\\"Got data, msg={msg.msg}\\\", flush=True)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{consumer_script}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0845837e\",\n   \"metadata\": {},\n   \"source\": [\n    \"!!! info \\\\\\\"Kafka configuration\\\\\\\"\\n\",\n    \"\\n\",\n    \"    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"6b95e812\",\n   \"metadata\": {},\n   \"source\": [\n    \"!!! warning \\\\\\\"Remember to flush\\\\\\\"\\n\",\n    \"\\n\",\n    \"    Notice the **flush=True** option when using print in our consumer. 
This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3efde628\",\n   \"metadata\": {},\n   \"source\": [\n    \"To run this consumer, in your terminal, run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9b5100a2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"consumer_cmd = \\\"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```shell\\\\n{consumer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9f3770bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"After running the command, you should see something similar to the ouput below:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6e9ede31\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: 
But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\\n\",\n      \"[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\\n\",\n      \"[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\\n\",\n      \"[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\\n\",\n      \"[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\\n\",\n      \"[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. 
\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\\n\",\n      \"[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\\n\",\n      \"\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"async with ApacheKafkaBroker() as bootstrap_server:\\n\",\n    \"    os.environ[\\\"KAFKA_HOSTNAME\\\"], os.environ[\\\"KAFKA_PORT\\\"] = bootstrap_server.split(\\\":\\\")\\n\",\n    \"\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=consumer_script,\\n\",\n    \"        script_file=\\\"hello_kafka_consumer.py\\\",\\n\",\n    \"        cmd=consumer_cmd,\\n\",\n    \"        cancel_after=10,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, output.decode(\\\"utf-8\\\")\\n\",\n    \"    print(output.decode(\\\"utf-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"29f4df11\",\n   \"metadata\": {},\n   \"source\": [\n    \"Now you can interact with your consumer, by sending the messages to the subscribed 'hello' topic, don't worry, we will cover this in the next 
step of this guide.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d28903bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Sending first message to your consumer\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"62de2731\",\n   \"metadata\": {},\n   \"source\": [\n    \"After we have created and run our first consumer, we should send a message to it, to make sure it is working properly.\\n\",\n    \"\\n\",\n    \"If you are using the Kafka setup as described in the Intro guide, you can follow the steps listed here to send a message to the hello topic.\\n\",\n    \"\\n\",\n    \"First, connect to your running kafka broker by running:\\n\",\n    \"\\n\",\n    \"``` shell\\n\",\n    \"docker run -it kafka /bin/bash\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Then, when connected to the container, run:\\n\",\n    \"\\n\",\n    \"``` shell\\n\",\n    \"kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This will open an interactive connection to the hello topic, now you can write your mesages to the topic and they will be consumed by our consumer.\\n\",\n    \"\\n\",\n    \"In the shell, type:\\n\",\n    \"``` shell\\n\",\n    \"{\\\"msg\\\":\\\"hello\\\"}\\n\",\n    \"```\\n\",\n    \"and press enter. This will send a hello message to the topic which will be read by our running consumer and outputed to stdout.\\n\",\n    \"\\n\",\n    \"Check the output of your consumer (terminal where you ran the 'fastkafka run' command) and confirm that your consumer has read the Kafka message. 
You shoud see something like this:\\n\",\n    \"``` shell\\n\",\n    \"Got data, msg=hello\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fc013025\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Creating a hello Kafka producer\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a65e725f\",\n   \"metadata\": {},\n   \"source\": [\n    \"Consuming messages is only a part of this Library functionality, the other big part is producing the messages. So, let's create our first kafka producer which will send it's greetings to our consumer periodically.\\n\",\n    \"\\n\",\n    \"The producer will:\\n\",\n    \"\\n\",\n    \"1. Connect to the Kafka Broker we setup in the Intro guide\\n\",\n    \"2. Connect to the hello topic\\n\",\n    \"3. Periodically send a message to the hello world topic\\n\",\n    \"    \\n\",\n    \"To create the producer, first, create a file named <b>hello_kafka_producer.py</b> and copy the following code to it:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4c3c5876\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"from os import environ\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from fastkafka._components.logger import get_logger\\n\",\n       \"\\n\",\n       \"kafka_server_url = environ[\\\"KAFKA_HOSTNAME\\\"]\\n\",\n       \"kafka_server_port = environ[\\\"KAFKA_PORT\\\"]\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"description\\\": \\\"local development kafka\\\",\\n\",\n       \"        \\\"url\\\": kafka_server_url,\\n\",\n       \"        \\\"port\\\": kafka_server_port\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       
\"\\n\",\n       \"class HelloKafkaMsg(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    kafka_brokers=kafka_brokers\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"logger = get_logger(__name__)\\n\",\n       \"\\n\",\n       \"@kafka_app.produces()\\n\",\n       \"async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\\n\",\n       \"    logger.info(f\\\"Producing: {msg}\\\")\\n\",\n       \"    return msg\\n\",\n       \"\\n\",\n       \"@kafka_app.run_in_background()\\n\",\n       \"async def hello_every_second():\\n\",\n       \"    while(True):\\n\",\n       \"        await to_hello(HelloKafkaMsg(msg=\\\"hello\\\"))\\n\",\n       \"        await asyncio.sleep(1)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"producer_script = \\\"\\\"\\\"\\n\",\n    \"from os import environ\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"\\n\",\n    \"kafka_server_url = environ[\\\"KAFKA_HOSTNAME\\\"]\\n\",\n    \"kafka_server_port = environ[\\\"KAFKA_PORT\\\"]\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"description\\\": \\\"local development kafka\\\",\\n\",\n    \"        \\\"url\\\": kafka_server_url,\\n\",\n    \"        \\\"port\\\": kafka_server_port\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    
\"class HelloKafkaMsg(BaseModel):\\n\",\n    \"    msg: str = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"Hello\\\",\\n\",\n    \"        description=\\\"Demo hello world message\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    kafka_brokers=kafka_brokers\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"\\n\",\n    \"@kafka_app.produces()\\n\",\n    \"async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\\n\",\n    \"    logger.info(f\\\"Producing: {msg}\\\")\\n\",\n    \"    return msg\\n\",\n    \"\\n\",\n    \"@kafka_app.run_in_background()\\n\",\n    \"async def hello_every_second():\\n\",\n    \"    while(True):\\n\",\n    \"        await to_hello(HelloKafkaMsg(msg=\\\"hello\\\"))\\n\",\n    \"        await asyncio.sleep(1)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{producer_script}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f4d3eb5f\",\n   \"metadata\": {},\n   \"source\": [\n    \"!!! info \\\\\\\"Kafka configuration\\\\\\\"\\n\",\n    \"\\n\",\n    \"    This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. 
in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f310d1cd\",\n   \"metadata\": {},\n   \"source\": [\n    \"To run this producer, in your terminal, run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"070cd807\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"producer_cmd = \\\"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```shell\\\\n{producer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8de248bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"After running the command, you should see something similar to the ouput below:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3cf137c1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[879272]: [INFO] fastkafka._application.app: 
run_in_background() : Adding function 'hello_every_second' as background task\\n\",\n      \"[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\\n\",\n      \"[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\\n\",\n      \"[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\\n\",\n      \"[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\\n\",\n      \"[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\\n\",\n      \"[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\\n\",\n      \"[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : 
Execution finished for background task 'hello_every_second'\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\\n\",\n      \"\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"async with ApacheKafkaBroker() as bootstrap_server:\\n\",\n    \"    os.environ[\\\"KAFKA_HOSTNAME\\\"], os.environ[\\\"KAFKA_PORT\\\"] = bootstrap_server.split(\\\":\\\")\\n\",\n    \"\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=producer_script,\\n\",\n    \"        script_file=\\\"hello_kafka_producer.py\\\",\\n\",\n    \"        cmd=producer_cmd,\\n\",\n    \"        cancel_after=10,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, output.decode(\\\"utf-8\\\")\\n\",\n    \"    print(output.decode(\\\"utf-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0b869e2b\",\n   \"metadata\": {},\n   \"source\": [\n    \"Now, while the producer is running, it will send a HelloKafkaMsg every second to the hello kafka topic.\\n\",\n    \"If your consumer is still running, you should see the messages appear in its log.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1286a108\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Recap\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f3409cc2\",\n   \"metadata\": {},\n   \"source\": [\n    \"In this guide we have:\\n\",\n    \"  
  \\n\",\n    \"1. Created a simple Kafka consumer using FastKafka\\n\",\n    \"2. Sent a message to our consumer trough Kafka\\n\",\n    \"3. Created a simple Kafka producer using FastKafka\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_03_Authentication.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"66642296\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Authentication\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"126aa447\",\n   \"metadata\": {},\n   \"source\": [\n    \"## TLS Authentication\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"00dc91fc\",\n   \"metadata\": {},\n   \"source\": [\n    \"sasl_mechanism (str) – Authentication mechanism when security_protocol is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN\\n\",\n    \"\\n\",\n    \"sasl_plain_username (str) – username for SASL PLAIN authentication. Default: None\\n\",\n    \"\\n\",\n    \"sasl_plain_password (str) – password for SASL PLAIN authentication. Default: None\\n\",\n    \"\\n\",\n    \"sasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token provider instance. (See kafka.oauth.abstract). Default: None\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_04_Github_Actions_Workflow.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"aae8a764\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Deploy FastKafka docs to GitHub Pages\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"665f266f\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Getting started\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c5a70f46\",\n   \"metadata\": {},\n   \"source\": [\n    \"Add your workflow file `.github/workflows/fastkafka_docs_deploy.yml` and push it to your remote default branch.\\n\",\n    \"\\n\",\n    \"Here is an example workflow:\\n\",\n    \"\\n\",\n    \"```yaml\\n\",\n    \"name: Deploy FastKafka Generated Documentation to GitHub Pages\\n\",\n    \"\\n\",\n    \"on:\\n\",\n    \"  push:\\n\",\n    \"    branches: [ \\\"main\\\", \\\"master\\\" ]\\n\",\n    \"  workflow_dispatch:\\n\",\n    \"\\n\",\n    \"jobs:\\n\",\n    \"  deploy:\\n\",\n    \"    runs-on: ubuntu-latest\\n\",\n    \"    permissions:\\n\",\n    \"      contents: write\\n\",\n    \"    steps:\\n\",\n    \"      - uses: airtai/workflows/fastkafka-ghp@main\\n\",\n    \"        with:\\n\",\n    \"          app: \\\"test_fastkafka.application:kafka_app\\\"\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b29b85be\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Options\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f5d2cf03\",\n   \"metadata\": {},\n   \"source\": [\n    \"\\n\",\n    \"\\n\",\n    \"### Set app location\\n\",\n    \"\\n\",\n    \"Input in the form of `path:app`, where `path` is the path to a Python file and `app` is an object of type `FastKafka`:\\n\",\n    \"\\n\",\n    \"```yaml\\n\",\n    \"- name: Deploy\\n\",\n    \"  uses: airtai/workflows/fastkafka-ghp@main\\n\",\n    \"  with:\\n\",\n    \"    app: \\\"test_fastkafka.application:kafka_app\\\"\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"In the above example, `FastKafka` app is named 
as `kafka_app` and it is available in the `application` submodule of the `test_fastkafka` module.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"67a09999\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Example Repository\\n\",\n    \"\\n\",\n    \"A `FastKafka`-based library that uses the above-mentioned workflow actions to publish FastKafka docs to `Github Pages` can be found [here](https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml).\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_05_Lifespan_Handler.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b490915d\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Lifespan Events\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"305342b0\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"import platform\\n\",\n    \"from pathlib import Path\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from typing import Tuple\\n\",\n    \"\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from fastkafka._components.helpers import _import_from_string, change_dir\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, Tester, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"ae85b1d1\",\n   \"metadata\": {},\n   \"source\": [\n    \"Did you know that you can define some special code that runs before and after your Kafka application? This code will be executed just once, but it covers the whole lifespan of your app! :rocket:\\n\",\n    \"\\n\",\n    \"Lets break it down:\\n\",\n    \"\\n\",\n    \"You can define logic (code) that should be executed before the application starts up. This is like a warm-up for your app, getting it ready to consume and produce messages.\\n\",\n    \"\\n\",\n    \"Similarly, you can define logic (code) that should be executed when the application is shutting down. This is like a cool-down for your app, making sure everything is properly closed and cleaned up.\\n\",\n    \"\\n\",\n    \"By executing code before consuming and after producing, you cover the entire lifecycle of your application :tada:\\n\",\n    \"\\n\",\n    \"This is super handy for setting up shared resources that are needed across consumers and producers, like a database connection pool or a machine learning model. And the best part? 
You can clean up these resources when the app is shutting down!\\n\",\n    \"\\n\",\n    \"So lets give it a try and see how it can make your Kafka app even more awesome! :muscle:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c7acf3cf\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Lifespan example - Iris prediction model\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"47606657\",\n   \"metadata\": {},\n   \"source\": [\n    \"Let's dive into an example to see how you can leverage the lifecycle handler to solve a common use case. Imagine that you have some machine learning models that need to consume incoming messages and produce response/prediction messages. These models are shared among consumers and producers, which means you don't want to load them for every message.\\n\",\n    \"\\n\",\n    \"Here's where the lifecycle handler comes to the rescue! By loading the model before the messages are consumed and produced, but only right before the application starts receiving messages, you can ensure that the model is ready to use without compromising the performance of your tests. 
In the upcoming sections, we'll walk you through how to initialize an Iris species prediction model and use it in your developed application.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"29f7f3b7\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Lifespan\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fca909ff\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can define this startup and shutdown logic using the lifespan parameter of the FastKafka app, and an async context manager.\\n\",\n    \"\\n\",\n    \"Let's start with an example and then see it in detail.\\n\",\n    \"\\n\",\n    \"We create an async function lifespan() with yield like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e0d6a994\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from sklearn.datasets import load_iris\\n\",\n       \"from sklearn.linear_model import LogisticRegression\\n\",\n       \"from contextlib import asynccontextmanager\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"ml_models = {}\\n\",\n       \"\\n\",\n       \"@asynccontextmanager\\n\",\n       \"async def lifespan(app: FastKafka):\\n\",\n       \"    # Load the ML model\\n\",\n       \"    print(\\\"Loading the model!\\\")\\n\",\n       \"    X, y = load_iris(return_X_y=True)\\n\",\n       \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\\n\",\n       \"    yield\\n\",\n       \"    # Clean up the ML models and release the resources\\n\",\n       \"    \\n\",\n       \"    print(\\\"Exiting, clearing model dict!\\\")\\n\",\n       \"    ml_models.clear()\\n\",\n       \"    \\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": 
null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_lifespan = \\\"\\\"\\\"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"import_fastkafka = \\\"\\\"\\\"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"lifespan = \\\"\\\"\\\"ml_models = {}\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    print(\\\"Loading the model!\\\")\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    \\n\",\n    \"    print(\\\"Exiting, clearing model dict!\\\")\\n\",\n    \"    ml_models.clear()\\n\",\n    \"    \\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{import_lifespan + import_fastkafka + lifespan}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f5ebc906\",\n   \"metadata\": {},\n   \"source\": [\n    \"The first thing to notice, is that we are defining an async function with `yield`. 
This is very similar to Dependencies with `yield`.\\n\",\n    \"\\n\",\n    \"The first part of the function, before the `yield`, will be executed **before** the application starts.\\n\",\n    \"And the part after the `yield` will be executed **after** the application has finished.\\n\",\n    \"\\n\",\n    \"This lifespan will create an iris_prediction model on application startup and cleanup the references after the app is shutdown.\\n\",\n    \"\\n\",\n    \"The lifespan will be passed an KafkaApp reference on startup of your application, which you can use to reference your application on startup.\\n\",\n    \"\\n\",\n    \"For demonstration sake, we also added prints so that when running the app we can see that our lifespan was called.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"7b74c00f\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Async context manager\\n\",\n    \"\\n\",\n    \"Context managers can be used in `with` blocks, our lifespan, for example could be used like this:\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"ml_models = {}\\n\",\n    \"async with lifespan(None):\\n\",\n    \"    print(ml_models)\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"When you create a context manager or an async context manager, what it does is that, before entering the `with` block, it will execute the code before the `yield`, and after exiting the `with` block, it will execute the code after the `yield`.\\n\",\n    \"\\n\",\n    \"If you want to learn more about context managers and contextlib decorators, please visit [Python official docs](https://docs.python.org/3/library/contextlib.html)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"250d3323\",\n   \"metadata\": {},\n   \"source\": [\n    \"## App demo\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a449101a\",\n   \"metadata\": {},\n   \"source\": [\n    \"### FastKafka app\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": 
\"6d6f4163\",\n   \"metadata\": {},\n   \"source\": [\n    \"Lets now create our application using the created lifespan handler.\\n\",\n    \"\\n\",\n    \"Notice how we passed our lifespan handler to the app when constructing it trough the `lifespan` argument.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fd0ed2c8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Iris predictions\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \"    lifespan=lifespan,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"app = \\\"\\\"\\\"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    
lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{import_fastkafka + app}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"972ebb39\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Data modeling\\n\",\n    \"\\n\",\n    \"Lets model the Iris data for our app:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1ea24631\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n       \"\\n\",\n       \"class IrisInputData(BaseModel):\\n\",\n       \"    sepal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    sepal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"class IrisPrediction(BaseModel):\\n\",\n       \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_pydantic = \\\"\\\"\\\"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"\\n\",\n    
\"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"data_model = \\\"\\\"\\\"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{import_pydantic + data_model}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e6ab2c5c\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Consumers and producers\\n\",\n    \"\\n\",\n    \"Lets create a consumer and producer for our app that will generate predictions from input iris data.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"74aee2c5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n       \"async def on_input_data(msg: IrisInputData):\\n\",\n       \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n       \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n       \"    )[0]\\n\",\n       \"\\n\",\n       \"    await to_predictions(species_class)\\n\",\n       \"\\n\",\n       \"\\n\",\n       
\"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n       \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n       \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n       \"\\n\",\n       \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n       \"    return prediction\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"producers_and_consumers = \\\"\\\"\\\"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{producers_and_consumers}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e47123e0\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Final app\\n\",\n    \"\\n\",\n    \"The final app looks like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5969a922\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      
\"text/markdown\": [\n       \"```python\\n\",\n       \"from sklearn.datasets import load_iris\\n\",\n       \"from sklearn.linear_model import LogisticRegression\\n\",\n       \"from contextlib import asynccontextmanager\\n\",\n       \"\\n\",\n       \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"class IrisInputData(BaseModel):\\n\",\n       \"    sepal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    sepal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"class IrisPrediction(BaseModel):\\n\",\n       \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n       \"ml_models = {}\\n\",\n       \"\\n\",\n       \"@asynccontextmanager\\n\",\n       \"async def lifespan(app: FastKafka):\\n\",\n       \"    # Load the ML model\\n\",\n       \"    print(\\\"Loading the model!\\\")\\n\",\n       \"    X, y = load_iris(return_X_y=True)\\n\",\n       \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\\n\",\n       \"    yield\\n\",\n       \"    # Clean up the ML models and release the resources\\n\",\n       \"    \\n\",\n       \"    print(\\\"Exiting, clearing model dict!\\\")\\n\",\n       \"    ml_models.clear()\\n\",\n       \"    \\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": 
{\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Iris predictions\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \"    lifespan=lifespan,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n       \"async def on_input_data(msg: IrisInputData):\\n\",\n       \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n       \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n       \"    )[0]\\n\",\n       \"\\n\",\n       \"    await to_predictions(species_class)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n       \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n       \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n       \"\\n\",\n       \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n       \"    return prediction\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"complete_app = (\\n\",\n    \"    import_lifespan\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + import_fastkafka\\n\",\n    \"    + data_model\\n\",\n    \"    + lifespan\\n\",\n    \"    + app\\n\",\n    \"    + producers_and_consumers\\n\",\n    \")\\n\",\n    
\"md(f\\\"```python\\\\n{complete_app}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b1a3a24c\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Running the app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f2509682\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"Now we can run the app with your custom lifespan handler. Copy the code above in lifespan_example.py and run it by running\\n\",\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_file = \\\"lifespan_example.py\\\"\\n\",\n    \"cmd = (\\n\",\n    \"    \\\"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\\\"\\n\",\n    \")\\n\",\n    \"md(\\n\",\n    \"    f\\\"Now we can run the app with your custom lifespan handler. 
Copy the code above in lifespan_example.py and run it by running\\\\n```shell\\\\n{cmd}\\\\n```\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d7a53cc7\",\n   \"metadata\": {},\n   \"source\": [\n    \"When you run the app, you should see a simmilar output to the one below:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ca8952a2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def _run_example_app(\\n\",\n    \"    *, app_example: str, bootstrap_server: str, script_file: str, cmd: str\\n\",\n    \") -> Tuple[int, str]:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=app_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=20,\\n\",\n    \"    )\\n\",\n    \"    return exit_code, output.decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"bea6a823\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-02 12:09:07.075 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-02 12:09:07.075 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_WindowsSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-02 12:09:07.075 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling 
nest_asyncio.apply()\\n\",\n      \"23-06-02 12:09:07.083 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-02 12:09:07.091 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-02 12:09:07.091 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-02 12:09:07.099 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-02 12:09:07.099 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"\\n\",\n      \"23-06-02 12:09:07.099 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:09:07.107 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50644\\n\",\n      \"\\n\",\n      \"23-06-02 12:09:07.107 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:09:07.107 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50645\\n\",\n      \"\\n\",\n      \"23-06-02 12:09:07.107 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:09:07.115 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50646\\n\",\n      \"\\n\",\n      \"23-06-02 12:09:07.115 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:09:07.115 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50647\\n\",\n      \"23-06-02 12:09:07.115 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\"\n     ]\n    },\n    {\n     \"ename\": \"ValueError\",\n     \"evalue\": \"Could not start zookeeper with params: [{'zookeeper_port': 2181}, {'zookeeper_port': '50644'}, {'zookeeper_port': 
'50645'}, {'zookeeper_port': '50646'}]\",\n     \"output_type\": \"error\",\n     \"traceback\": [\n      \"\\u001b[1;31m---------------------------------------------------------------------------\\u001b[0m\",\n      \"\\u001b[1;31mValueError\\u001b[0m                                Traceback (most recent call last)\",\n      \"Cell \\u001b[1;32mIn[9], line 3\\u001b[0m\\n\\u001b[0;32m      1\\u001b[0m \\u001b[38;5;66;03m# | hide\\u001b[39;00m\\n\\u001b[1;32m----> 3\\u001b[0m \\u001b[38;5;28;43;01mwith\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43mApacheKafkaBroker\\u001b[49m\\u001b[43m(\\u001b[49m\\n\\u001b[0;32m      4\\u001b[0m \\u001b[43m    \\u001b[49m\\u001b[43mtopicas\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43m[\\u001b[49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[38;5;124;43mhello_world\\u001b[39;49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[43m]\\u001b[49m\\u001b[43m,\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[43mapply_nest_asyncio\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[38;5;28;43;01mTrue\\u001b[39;49;00m\\n\\u001b[0;32m      5\\u001b[0m \\u001b[43m)\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[38;5;28;43;01mas\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43mbootstrap_server\\u001b[49m\\u001b[43m:\\u001b[49m\\n\\u001b[0;32m      6\\u001b[0m \\u001b[43m    \\u001b[49m\\u001b[43mexit_code\\u001b[49m\\u001b[43m,\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[43moutput\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43m \\u001b[49m\\u001b[38;5;28;43;01mawait\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43m_run_example_app\\u001b[49m\\u001b[43m(\\u001b[49m\\n\\u001b[0;32m      7\\u001b[0m \\u001b[43m        \\u001b[49m\\u001b[43mapp_example\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43mcomplete_app\\u001b[49m\\u001b[43m,\\u001b[49m\\n\\u001b[0;32m      8\\u001b[0m \\u001b[43m        
\\u001b[49m\\u001b[43mbootstrap_server\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43mbootstrap_server\\u001b[49m\\u001b[43m,\\u001b[49m\\n\\u001b[0;32m      9\\u001b[0m \\u001b[43m        \\u001b[49m\\u001b[43mscript_file\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43mscript_file\\u001b[49m\\u001b[43m,\\u001b[49m\\n\\u001b[0;32m     10\\u001b[0m \\u001b[43m        \\u001b[49m\\u001b[43mcmd\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43mcmd\\u001b[49m\\u001b[43m,\\u001b[49m\\n\\u001b[0;32m     11\\u001b[0m \\u001b[43m    \\u001b[49m\\u001b[43m)\\u001b[49m\\n\\u001b[0;32m     12\\u001b[0m \\u001b[43m    \\u001b[49m\\u001b[43mexpected_returncode\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43m \\u001b[49m\\u001b[38;5;241;43m1\\u001b[39;49m\\u001b[43m \\u001b[49m\\u001b[38;5;28;43;01mif\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43mplatform\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43msystem\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[38;5;241;43m==\\u001b[39;49m\\u001b[43m \\u001b[49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[38;5;124;43mWindows\\u001b[39;49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[43m \\u001b[49m\\u001b[38;5;28;43;01melse\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[38;5;241;43m0\\u001b[39;49m\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:289\\u001b[0m, in \\u001b[0;36mApacheKafkaBroker.__enter__\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    287\\u001b[0m \\u001b[38;5;28;01mdef\\u001b[39;00m \\u001b[38;5;21m__enter__\\u001b[39m(\\u001b[38;5;28mself\\u001b[39m) \\u001b[38;5;241m-\\u001b[39m\\u001b[38;5;241m>\\u001b[39m \\u001b[38;5;28mstr\\u001b[39m:\\n\\u001b[0;32m    288\\u001b[0m     \\u001b[38;5;66;03m#         ApacheKafkaBroker._check_deps()\\u001b[39;00m\\n\\u001b[1;32m--> 289\\u001b[0m     
\\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[38;5;28;43mself\\u001b[39;49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43mstart\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:621\\u001b[0m, in \\u001b[0;36mstart\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    618\\u001b[0m         logger\\u001b[38;5;241m.\\u001b[39merror(msg)\\n\\u001b[0;32m    619\\u001b[0m         \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;167;01mRuntimeError\\u001b[39;00m(msg)\\n\\u001b[1;32m--> 621\\u001b[0m retval \\u001b[38;5;241m=\\u001b[39m \\u001b[43mloop\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43mrun_until_complete\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[38;5;28;43mself\\u001b[39;49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43m_start\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\u001b[43m)\\u001b[49m\\n\\u001b[0;32m    622\\u001b[0m logger\\u001b[38;5;241m.\\u001b[39minfo(\\u001b[38;5;124mf\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00m\\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m\\u001b[38;5;18m__class__\\u001b[39m\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m.start(): returning \\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mretval\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m\\\"\\u001b[39m)\\n\\u001b[0;32m    623\\u001b[0m \\u001b[38;5;28;01mreturn\\u001b[39;00m retval\\n\",\n      \"File \\u001b[1;32m~\\\\dev\\\\fastkafka\\\\venv\\\\Lib\\\\site-packages\\\\nest_asyncio.py:90\\u001b[0m, in \\u001b[0;36m_patch_loop.<locals>.run_until_complete\\u001b[1;34m(self, future)\\u001b[0m\\n\\u001b[0;32m     87\\u001b[0m \\u001b[38;5;28;01mif\\u001b[39;00m \\u001b[38;5;129;01mnot\\u001b[39;00m f\\u001b[38;5;241m.\\u001b[39mdone():\\n\\u001b[0;32m     88\\u001b[0m     \\u001b[38;5;28;01mraise\\u001b[39;00m 
\\u001b[38;5;167;01mRuntimeError\\u001b[39;00m(\\n\\u001b[0;32m     89\\u001b[0m         \\u001b[38;5;124m'\\u001b[39m\\u001b[38;5;124mEvent loop stopped before Future completed.\\u001b[39m\\u001b[38;5;124m'\\u001b[39m)\\n\\u001b[1;32m---> 90\\u001b[0m \\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[43mf\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43mresult\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\n\",\n      \"File \\u001b[1;32m~\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python311\\\\Lib\\\\asyncio\\\\futures.py:203\\u001b[0m, in \\u001b[0;36mFuture.result\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    201\\u001b[0m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m__log_traceback \\u001b[38;5;241m=\\u001b[39m \\u001b[38;5;28;01mFalse\\u001b[39;00m\\n\\u001b[0;32m    202\\u001b[0m \\u001b[38;5;28;01mif\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_exception \\u001b[38;5;129;01mis\\u001b[39;00m \\u001b[38;5;129;01mnot\\u001b[39;00m \\u001b[38;5;28;01mNone\\u001b[39;00m:\\n\\u001b[1;32m--> 203\\u001b[0m     \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_exception\\u001b[38;5;241m.\\u001b[39mwith_traceback(\\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_exception_tb)\\n\\u001b[0;32m    204\\u001b[0m \\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_result\\n\",\n      \"File \\u001b[1;32m~\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python311\\\\Lib\\\\asyncio\\\\tasks.py:267\\u001b[0m, in \\u001b[0;36mTask.__step\\u001b[1;34m(***failed resolving arguments***)\\u001b[0m\\n\\u001b[0;32m    263\\u001b[0m \\u001b[38;5;28;01mtry\\u001b[39;00m:\\n\\u001b[0;32m    264\\u001b[0m     \\u001b[38;5;28;01mif\\u001b[39;00m exc \\u001b[38;5;129;01mis\\u001b[39;00m \\u001b[38;5;28;01mNone\\u001b[39;00m:\\n\\u001b[0;32m    265\\u001b[0m         \\u001b[38;5;66;03m# We use the `send` method 
directly, because coroutines\\u001b[39;00m\\n\\u001b[0;32m    266\\u001b[0m         \\u001b[38;5;66;03m# don't have `__iter__` and `__next__` methods.\\u001b[39;00m\\n\\u001b[1;32m--> 267\\u001b[0m         result \\u001b[38;5;241m=\\u001b[39m \\u001b[43mcoro\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43msend\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[38;5;28;43;01mNone\\u001b[39;49;00m\\u001b[43m)\\u001b[49m\\n\\u001b[0;32m    268\\u001b[0m     \\u001b[38;5;28;01melse\\u001b[39;00m:\\n\\u001b[0;32m    269\\u001b[0m         result \\u001b[38;5;241m=\\u001b[39m coro\\u001b[38;5;241m.\\u001b[39mthrow(exc)\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:561\\u001b[0m, in \\u001b[0;36m_start\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    558\\u001b[0m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mtemporary_directory \\u001b[38;5;241m=\\u001b[39m TemporaryDirectory()\\n\\u001b[0;32m    559\\u001b[0m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mtemporary_directory_path \\u001b[38;5;241m=\\u001b[39m Path(\\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mtemporary_directory\\u001b[38;5;241m.\\u001b[39m\\u001b[38;5;21m__enter__\\u001b[39m())\\n\\u001b[1;32m--> 561\\u001b[0m \\u001b[38;5;28;01mawait\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_start_zookeeper()\\n\\u001b[0;32m    562\\u001b[0m \\u001b[38;5;28;01mawait\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_start_kafka()\\n\\u001b[0;32m    564\\u001b[0m listener_port \\u001b[38;5;241m=\\u001b[39m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mkafka_kwargs\\u001b[38;5;241m.\\u001b[39mget(\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;124mlistener_port\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m, \\u001b[38;5;241m9092\\u001b[39m)\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran 
rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:515\\u001b[0m, in \\u001b[0;36m_start_zookeeper\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    512\\u001b[0m \\u001b[38;5;129m@patch\\u001b[39m\\n\\u001b[0;32m    513\\u001b[0m \\u001b[38;5;28;01masync\\u001b[39;00m \\u001b[38;5;28;01mdef\\u001b[39;00m \\u001b[38;5;21m_start_zookeeper\\u001b[39m(\\u001b[38;5;28mself\\u001b[39m: ApacheKafkaBroker) \\u001b[38;5;241m-\\u001b[39m\\u001b[38;5;241m>\\u001b[39m \\u001b[38;5;28;01mNone\\u001b[39;00m:\\n\\u001b[0;32m    514\\u001b[0m \\u001b[38;5;250m    \\u001b[39m\\u001b[38;5;124;03m\\\"\\\"\\\"Starts a local ZooKeeper instance asynchronously.\\\"\\\"\\\"\\u001b[39;00m\\n\\u001b[1;32m--> 515\\u001b[0m     \\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[38;5;28;01mawait\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_start_service(\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;124mzookeeper\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m)\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:503\\u001b[0m, in \\u001b[0;36m_start_service\\u001b[1;34m(self, service)\\u001b[0m\\n\\u001b[0;32m    500\\u001b[0m         \\u001b[38;5;28msetattr\\u001b[39m(\\u001b[38;5;28mself\\u001b[39m, \\u001b[38;5;124mf\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mservice\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m_task\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m, service_task)\\n\\u001b[0;32m    501\\u001b[0m         \\u001b[38;5;28;01mreturn\\u001b[39;00m\\n\\u001b[1;32m--> 503\\u001b[0m \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;167;01mValueError\\u001b[39;00m(\\u001b[38;5;124mf\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;124mCould not start \\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mservice\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m with params: 
\\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mconfigs_tried\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m\\\"\\u001b[39m)\\n\",\n      \"\\u001b[1;31mValueError\\u001b[0m: Could not start zookeeper with params: [{'zookeeper_port': 2181}, {'zookeeper_port': '50644'}, {'zookeeper_port': '50645'}, {'zookeeper_port': '50646'}]\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topicas=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=59092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    exit_code, output = await _run_example_app(\\n\",\n    \"        app_example=complete_app,\\n\",\n    \"        bootstrap_server=bootstrap_server,\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"    )\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, output\\n\",\n    \"    assert \\\"Loading the model!\\\" in output, output\\n\",\n    \"    assert \\\"Exiting, clearing model dict!\\\" in output, output\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"417e5eb3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(output)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2192a4f5\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Recap\\n\",\n    \"\\n\",\n    \"In this guide we have defined a lifespan handler and passed to our FastKafka app.\\n\",\n    \"\\n\",\n    \"Some important points are:\\n\",\n    \"\\n\",\n    \"1. Lifespan handler is implemented as [AsyncContextManager](https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager)\\n\",\n    \"2. Code **before** yield in lifespan will be executed **before** application **startup**\\n\",\n    \"3. 
Code **after** yield in lifespan will be executed **after** application **shutdown**\\n\",\n    \"4. You can pass your lifespan handler to FastKafka app on initialisation by passing a `lifespan` argument\\n\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_06_Benchmarking_FastKafka.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8b1b12fd\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Benchmarking FastKafka app\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e79e80d6\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Prerequisites\\n\",\n    \"\\n\",\n    \"To benchmark a `FastKafka` project, you will need the following:\\n\",\n    \"\\n\",\n    \"1. A library built with `FastKafka`.\\n\",\n    \"2. A running `Kafka` instance to benchmark the FastKafka application against.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"36a63ab0\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Creating FastKafka Code\\n\",\n    \"\\n\",\n    \"Let's create a `FastKafka`-based application and write it to the `application.py` file based on the [tutorial](/docs#tutorial).\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file\\n\",\n    \"\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, 
description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    
await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"`FastKafka` has a decorator for benchmarking which is appropriately called as `benchmark`.\\n\",\n    \"Let's edit our `application.py` file and add the `benchmark` decorator to the consumes method.\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file with benchmark\\n\",\n    \"\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, 
description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"@kafka_app.benchmark(interval=1, sliding_window_size=5)\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    
\"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Here we are conducting a benchmark of a function that consumes data from the `input_data` topic with an interval of 1 second and a sliding window size of 5. \\n\",\n    \"\\n\",\n    \"This `benchmark` method uses the `interval` parameter to calculate the results over a specific time period, and the `sliding_window_size` parameter to determine the maximum number of results to use in calculating the average throughput and standard deviation. \\n\",\n    \"\\n\",\n    \"This benchmark is important to ensure that the function is performing optimally and to identify any areas for improvement.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"69cd27f1\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Starting Kafka\\n\",\n    \"\\n\",\n    \"If you already have a `Kafka` running somewhere, then you can skip this step. \\n\",\n    \"\\n\",\n    \"Please keep in mind that your benchmarking results may be affected by bottlenecks such as network, CPU cores in the Kafka machine, or even the Kafka configuration itself.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b147ee29\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Installing Java and Kafka\\n\",\n    \"We need a working `Kafka` instance to benchmark our `FastKafka` app, and to run `Kafka` we need `Java`. 
Thankfully, `FastKafka` comes with a CLI to install both `Java` and `Kafka` on our machine.\\n\",\n    \"\\n\",\n    \"So, let's install `Java` and `Kafka` by executing the following command.\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"fastkafka testing install_deps\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"The above command will extract `Kafka` scripts at the location \\\"$HOME/.local/kafka_2.13-3.3.2\\\" on your machine. \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8a460d8d\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Creating configuration for Zookeeper and Kafka\\n\",\n    \"Now we need to start `Zookeeper` and `Kafka` separately, and to start them we need `zookeeper.properties` and `kafka.properties` files.\\n\",\n    \"\\n\",\n    \"Let's create a folder inside the folder where `Kafka` scripts were extracted and change directory into it.\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Let's create a file called `zookeeper.properties` and write the following content to the file:\\n\",\n    \"\\n\",\n    \"```txt\\n\",\n    \"dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\\n\",\n    \"clientPort=2181\\n\",\n    \"maxClientCnxns=0\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Similarly, let's create a file called `kafka.properties` and write the following content to the file:\\n\",\n    \"\\n\",\n    \"```txt\\n\",\n    \"broker.id=0\\n\",\n    \"listeners=PLAINTEXT://:9092\\n\",\n    \"\\n\",\n    \"num.network.threads=3\\n\",\n    \"num.io.threads=8\\n\",\n    \"socket.send.buffer.bytes=102400\\n\",\n    \"socket.receive.buffer.bytes=102400\\n\",\n    \"socket.request.max.bytes=104857600\\n\",\n    \"\\n\",\n    \"num.partitions=1\\n\",\n    \"num.recovery.threads.per.data.dir=1\\n\",\n    \"offsets.topic.replication.factor=1\\n\",\n    
\"transaction.state.log.replication.factor=1\\n\",\n    \"transaction.state.log.min.isr=1\\n\",\n    \"\\n\",\n    \"log.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\\n\",\n    \"log.flush.interval.messages=10000\\n\",\n    \"log.flush.interval.ms=1000\\n\",\n    \"log.retention.hours=168\\n\",\n    \"log.retention.bytes=1073741824\\n\",\n    \"log.segment.bytes=1073741824\\n\",\n    \"log.retention.check.interval.ms=300000\\n\",\n    \"\\n\",\n    \"zookeeper.connect=localhost:2181\\n\",\n    \"zookeeper.connection.timeout.ms=18000\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"07392f57\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Starting Zookeeper and Kafka\\n\",\n    \"\\n\",\n    \"We need two different terminals to run `Zookeeper` in one and `Kafka` in another.\\n\",\n    \"Let's open a new terminal and run the following commands to start `Zookeeper`:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\\n\",\n    \"cd $HOME/.local/kafka_2.13-3.3.2/bin\\n\",\n    \"./zookeeper-server-start.sh ../data_dir/zookeeper.properties\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Once `Zookeeper` is up and running, open a new terminal and execute the following commands to start `Kafka`:\\n\",\n    \"```cmd\\n\",\n    \"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\\n\",\n    \"cd $HOME/.local/kafka_2.13-3.3.2/bin\\n\",\n    \"./kafka-server-start.sh ../data_dir/kafka.properties\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Now we have both `Zookeeper` and `Kafka` up and running.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"ac98a6ac\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Creating topics in Kafka\\n\",\n    \"\\n\",\n    \"In a new terminal, please execute the following command to create necessary topics in `Kafka`:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\\n\",\n    \"cd 
$HOME/.local/kafka_2.13-3.3.2/bin\\n\",\n    \"./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\\n\",\n    \"./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\\n\",\n    \"```\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2e947d67\",\n   \"metadata\": {},\n   \"source\": [\n    \"\\n\",\n    \"#### Populating topics with dummy data\\n\",\n    \"\\n\",\n    \"To benchmark our `FastKafka` app, we need some data in `Kafka` topics. \\n\",\n    \"\\n\",\n    \"In the same terminal, let's create some dummy data:\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"yes '{\\\"sepal_length\\\": 0.7739560486, \\\"sepal_width\\\": 0.8636615789, \\\"petal_length\\\": 0.6122663046, \\\"petal_width\\\": 0.1338914722}' | head -n 1000000 > /tmp/test_data\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"This command will create a file called `test_data` in the `tmp` folder with one million rows of text. This will act as dummy data to populate the `input_data` topic.\\n\",\n    \"\\n\",\n    \"Let's populate the created topic `input_data` with the dummy data which we created above:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Now our topic `input_data` has one million records/messages in it. 
If you want more messages in topic, you can simply execute the above command again and again.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fac21518\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Benchmarking FastKafka\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Once `Zookeeper` and `Kafka` are ready, benchmarking `FastKafka` app is as simple as running the `fastkafka run` command:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This command will start the `FastKafka` app and begin consuming messages from `Kafka`, which we spun up earlier. \\n\",\n    \"Additionally, the same command will output all of the benchmark throughputs based on the `interval` and `sliding_window_size` values.\\n\",\n    \"\\n\",\n    \"The output for the `fastkafka run` command is:\\n\",\n    \"\\n\",\n    \"```txt\\n\",\n    \"[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\\n\",\n    \"[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\\n\",\n    \"[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n    \"[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n    \"[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\\n\",\n    \"ost:9092', 'max_poll_records': 100}\\n\",\n    \"[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n    \"[385814]: 
23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\\n\",\n    \"[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\\n\",\n    \"[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n    \"[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\\n\",\n    \"[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\\n\",\n    \"[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\\n\",\n    \"[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\\n\",\n    \"[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\\n\",\n    \"[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\\n\",\n    \"[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\\n\",\n    \"=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\\n\",\n    \"[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For 
application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Based on the output, when using 1 worker, our `FastKafka` app achieved a `throughput` of 93k messages per second and an `average throughput` of 93k messages per second.\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f1e1fcb5\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import sys\\n\",\n    \"from IPython.display import Markdown\\n\",\n    \"\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from pathlib import Path\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from fastkafka._components.helpers import change_dir, _import_from_string\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1f7da504\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"# | notest\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3d6f852f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a6a18bc\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    
)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8eecb8b6\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Encoding and Decoding Kafka Messages with FastKafka\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"642ac9ba\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Prerequisites\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"1. A basic knowledge of `FastKafka` is needed to proceed with this guide. If you are not familiar with `FastKafka`, please go through the [tutorial](/docs#tutorial) first.\\n\",\n    \"2. `FastKafka` with its dependencies installed is needed. Please install `FastKafka` using the command - `pip install fastkafka`\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"629f0460\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Ways to Encode and Decode Messages with FastKafka\\n\",\n    \"\\n\",\n    \"In python, by default, we send Kafka messages as bytes. Even if our message is a string, we convert it to bytes and then send it to Kafka topic. 
imilarly, while consuming messages, we consume them as bytes and then convert them to strings.\\n\",\n    \"\\n\",\n    \"In FastKafka, we specify message schema using Pydantic models as mentioned in [tutorial](/docs#messages):\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"# Define Pydantic models for Kafka messages\\n\",\n    \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Then, we send and receive messages as instances of Pydantic models which we defined. So, FastKafka needs a way to encode/decode to these Pydantic model messages to bytes in order to send/receive messages to/from Kafka topics.\\n\",\n    \"\\n\",\n    \"The `@consumes` and `@produces` methods of FastKafka accept a parameter called `decoder`/`encoder` to decode/encode Kafka messages. FastKafka provides three ways to encode and decode messages:\\n\",\n    \"\\n\",\n    \"1. json - This is the default encoder/decoder option in FastKafka. While producing, this option converts our instance of Pydantic model messages to a JSON string and then converts it to bytes before sending it to the topic. 
While consuming, it converts bytes to a JSON string and then constructs an instance of Pydantic model from the JSON string.\\n\",\n    \"2. avro - This option uses Avro encoding/decoding to convert instances of Pydantic model messages to bytes while producing, and while consuming, it constructs an instance of Pydantic model from bytes.\\n\",\n    \"3. custom encoder/decoder - If you are not happy with the json or avro encoder/decoder options, you can write your own encoder/decoder functions and use them to encode/decode Pydantic messages.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"37e1a5ee\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 1. Json encoder and decoder\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"The default option in FastKafka is json encoder/decoder. This option, while producing, converts our instance of pydantic model messages to json string and then converts to bytes before sending it to the topics. While consuming it converts bytes to json string and then constructs instance of pydantic model from json string.\\n\",\n    \"\\n\",\n    \"We can use the application from [tutorial](/docs#running-the-service) as is, and it will use the json encoder/decoder by default. 
But, for clarity, let's modify it to explicitly accept the 'json' encoder/decoder parameter:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"32362501\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"# content of the \\\"application.py\\\" file\\n\",\n       \"\\n\",\n       \"from contextlib import asynccontextmanager\\n\",\n       \"\\n\",\n       \"from sklearn.datasets import load_iris\\n\",\n       \"from sklearn.linear_model import LogisticRegression\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"ml_models = {}\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@asynccontextmanager\\n\",\n       \"async def lifespan(app: FastKafka):\\n\",\n       \"    # Load the ML model\\n\",\n       \"    X, y = load_iris(return_X_y=True)\\n\",\n       \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n       \"        X, y\\n\",\n       \"    )\\n\",\n       \"    yield\\n\",\n       \"    # Clean up the ML models and release the resources\\n\",\n       \"    ml_models.clear()\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n       \"\\n\",\n       \"class IrisInputData(BaseModel):\\n\",\n       \"    sepal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    sepal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, 
description=\\\"Petal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"class IrisPrediction(BaseModel):\\n\",\n       \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n       \"    \\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"    },\\n\",\n       \"    \\\"production\\\": {\\n\",\n       \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n       \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n       \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Iris predictions\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \"    lifespan=lifespan,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"@kafka_app.consumes(topic=\\\"input_data\\\", decoder=\\\"json\\\")\\n\",\n       \"async def on_input_data(msg: IrisInputData):\\n\",\n       \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n       \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n       \"    )[0]\\n\",\n       \"\\n\",\n       \"    await to_predictions(species_class)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.produces(topic=\\\"predictions\\\", encoder=\\\"json\\\")\\n\",\n       \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n       \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n       \"\\n\",\n       \"    
prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n       \"    return prediction\\n\",\n       \"\\n\",\n       \"```\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"kafka_app_source = \\\"\\\"\\\"\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    
)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"    \\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", decoder=\\\"json\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\", encoder=\\\"json\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"Markdown(\\n\",\n    \"    
f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file\\n\",\n    \"{kafka_app_source}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e7759193\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-07-05 08:19:23.742 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-07-05 08:19:23.742 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-07-05 08:19:23.754 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-05 08:19:23.754 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-05 08:19:23.763 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-05 08:19:23.764 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-05 08:19:23.764 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-05 08:19:23.765 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-05 08:19:23.765 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-05 08:19:23.765 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-05 08:19:23.765 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-05 08:19:23.766 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\\n\",\n      \"23-07-05 08:19:23.766 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-05 08:19:23.769 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-05 08:19:23.769 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-05 08:19:23.769 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-05 08:19:23.769 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-05 08:19:23.770 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-05 08:19:23.770 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-07-05 08:19:23.770 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-05 08:19:27.765 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-05 08:19:27.765 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-05 08:19:27.766 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-05 08:19:27.766 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-05 08:19:27.766 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() 
called\\n\",\n      \"23-07-05 08:19:27.767 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-05 08:19:27.767 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-05 08:19:27.767 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-05 08:19:27.767 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    src_path = Path(d) / \\\"application.py\\\"\\n\",\n    \"    with open(src_path, \\\"w\\\") as source:\\n\",\n    \"        source.write(kafka_app_source)\\n\",\n    \"    with change_dir(d):\\n\",\n    \"        sys.path.insert(0, d)\\n\",\n    \"        from application import kafka_app, IrisInputData, IrisPrediction\\n\",\n    \"\\n\",\n    \"        from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"        msg = IrisInputData(\\n\",\n    \"            sepal_length=0.1,\\n\",\n    \"            sepal_width=0.2,\\n\",\n    \"            petal_length=0.3,\\n\",\n    \"            petal_width=0.4,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        # Start Tester app and create InMemory Kafka broker for testing\\n\",\n    \"        async with Tester(kafka_app) as tester:\\n\",\n    \"            # Send IrisInputData message to input_data topic\\n\",\n    \"            await tester.to_input_data(msg)\\n\",\n    \"\\n\",\n    \"            # Assert that the kafka_app responded with IrisPrediction in predictions topic\\n\",\n    \"            await tester.awaited_mocks.on_predictions.assert_awaited_with(\\n\",\n    \"                IrisPrediction(species=\\\"setosa\\\"), timeout=3\\n\",\n    \"            )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"346d9c8b\",\n   \"metadata\": {},\n   \"source\": [\n    
\"In the above code, the `@kafka_app.consumes` decorator sets up a consumer for the \\\"input_data\\\" topic, using the 'json' decoder to convert the message payload to an instance of `IrisInputData`. The `@kafka_app.produces` decorator sets up a producer for the \\\"predictions\\\" topic, using the 'json' encoder to convert the instance of `IrisPrediction` to message payload.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"75aca5c7\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 2. Avro encoder and decoder\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fe4ef914\",\n   \"metadata\": {},\n   \"source\": [\n    \"### What is Avro?\\n\",\n    \"\\n\",\n    \"Avro is a row-oriented remote procedure call and data serialization framework developed within Apache's Hadoop project. It uses JSON for defining data types and protocols, and serializes data in a compact binary format. To learn more about the Apache Avro, please check out the [docs](https://avro.apache.org/docs/).\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"07c8c300\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Installing FastKafka with Avro dependencies\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"`FastKafka` with dependencies for Apache Avro installed is needed to use avro encoder/decoder. Please install `FastKafka` with Avro support using the command - `pip install fastkafka[avro]`\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2ada1754\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Defining Avro Schema Using Pydantic Models\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"By default, you can use Pydantic model to define your message schemas. 
FastKafka internally takes care of encoding and decoding avro messages, based on the Pydantic models.\\n\",\n    \"\\n\",\n    \"So, similar to the [tutorial](/docs#tutorial), the message schema will remain as it is.\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"# Define Pydantic models for Avro messages\\n\",\n    \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"No need to change anything to support avro. 
You can use existing Pydantic models as is.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1fbd3c20\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Reusing existing avro schema\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"If you are using some other library to send and receive avro encoded messages, it is highly likely that you already have an Avro schema defined.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a9e42789\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Building pydantic models from avro schema dictionary\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Let's modify the above example and let's assume we have schemas already for `IrisInputData` and `IrisPrediction` which will look like below:\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"iris_input_data_schema = {\\n\",\n    \"    \\\"type\\\": \\\"record\\\",\\n\",\n    \"    \\\"namespace\\\": \\\"IrisInputData\\\",\\n\",\n    \"    \\\"name\\\": \\\"IrisInputData\\\",\\n\",\n    \"    \\\"fields\\\": [\\n\",\n    \"        {\\\"doc\\\": \\\"Sepal length in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"sepal_length\\\"},\\n\",\n    \"        {\\\"doc\\\": \\\"Sepal width in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"sepal_width\\\"},\\n\",\n    \"        {\\\"doc\\\": \\\"Petal length in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"petal_length\\\"},\\n\",\n    \"        {\\\"doc\\\": \\\"Petal width in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"petal_width\\\"},\\n\",\n    \"    ],\\n\",\n    \"}\\n\",\n    \"iris_prediction_schema = {\\n\",\n    \"    \\\"type\\\": \\\"record\\\",\\n\",\n    \"    \\\"namespace\\\": \\\"IrisPrediction\\\",\\n\",\n    \"    \\\"name\\\": \\\"IrisPrediction\\\",\\n\",\n    \"    \\\"fields\\\": [{\\\"doc\\\": \\\"Predicted species\\\", \\\"type\\\": \\\"string\\\", \\\"name\\\": \\\"species\\\"}],\\n\",\n    \"}\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"We can easily construct 
pydantic models from avro schema using `avsc_to_pydantic` function which is included as part of `FastKafka` itself.\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"from fastkafka.encoder import avsc_to_pydantic\\n\",\n    \"\\n\",\n    \"IrisInputData = avsc_to_pydantic(iris_input_data_schema)\\n\",\n    \"print(IrisInputData.model_fields)\\n\",\n    \"\\n\",\n    \"IrisPrediction = avsc_to_pydantic(iris_prediction_schema)\\n\",\n    \"print(IrisPrediction.model_fields)\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"The above code will convert avro schema to pydantic models and will print pydantic models' fields. The output of the above is:\\n\",\n    \"\\n\",\n    \"```txt\\n\",\n    \"{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\\n\",\n    \" 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\\n\",\n    \" 'petal_length': ModelField(name='petal_length', type=float, required=True),\\n\",\n    \" 'petal_width': ModelField(name='petal_width', type=float, required=True)}\\n\",\n    \" \\n\",\n    \" {'species': ModelField(name='species', type=str, required=True)}\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This is exactly same as manually defining the pydantic models ourselves. You don't have to worry about not making any mistakes while converting avro schema to pydantic models manually. You can easily and automatically accomplish it by using `avsc_to_pydantic` function as demonstrated above.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"233f4a6b\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Building pydantic models from `.avsc` file\\n\",\n    \"\\n\",\n    \"Not all cases will have avro schema conveniently defined as a python dictionary. You may have it stored as the proprietary `.avsc` files in filesystem. 
Let's see how to convert those `.avsc` files to pydantic models.\\n\",\n    \"\\n\",\n    \"Let's assume our avro files are stored in files called `iris_input_data_schema.avsc` and `iris_prediction_schema.avsc`. In that case, following code converts the schema to pydantic models:\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"import json\\n\",\n    \"from fastkafka.encoder import avsc_to_pydantic\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with open(\\\"iris_input_data_schema.avsc\\\", \\\"rb\\\") as f:\\n\",\n    \"    iris_input_data_schema = json.load(f)\\n\",\n    \"    \\n\",\n    \"with open(\\\"iris_prediction_schema.avsc\\\", \\\"rb\\\") as f:\\n\",\n    \"    iris_prediction_schema = json.load(f)\\n\",\n    \"    \\n\",\n    \"\\n\",\n    \"IrisInputData = avsc_to_pydantic(iris_input_data_schema)\\n\",\n    \"print(IrisInputData.model_fields)\\n\",\n    \"\\n\",\n    \"IrisPrediction = avsc_to_pydantic(iris_prediction_schema)\\n\",\n    \"print(IrisPrediction.model_fields)\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"191ab3f8\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Consume/Produce avro messages with FastKafka\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"`FastKafka` provides `@consumes` and `@produces` methods to consume/produces messages to/from a `Kafka` topic. 
This is explained in [tutorial](/docs#function-decorators).\\n\",\n    \"\\n\",\n    \"The `@consumes` and `@produces` methods accept a parameter called `decoder`/`encoder` to decode/encode avro messages.\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", decoder=\\\"avro\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\", encoder=\\\"avro\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"In the above example, in `@consumes` and `@produces` methods, we explicitly instruct FastKafka to `decode` and `encode` messages using the `avro` `decoder`/`encoder` instead of the default `json` `decoder`/`encoder`.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"44fed866\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Assembling it all together\\n\",\n    \"\\n\",\n    \"Let's rewrite the sample code found in [tutorial](/docs#running-the-service) to use `avro` to `decode` and `encode` messages:\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4923af43\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"# content of the \\\"application.py\\\" file\\n\",\n       \"\\n\",\n       \"from contextlib import 
asynccontextmanager\\n\",\n       \"\\n\",\n       \"from sklearn.datasets import load_iris\\n\",\n       \"from sklearn.linear_model import LogisticRegression\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"ml_models = {}\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@asynccontextmanager\\n\",\n       \"async def lifespan(app: FastKafka):\\n\",\n       \"    # Load the ML model\\n\",\n       \"    X, y = load_iris(return_X_y=True)\\n\",\n       \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n       \"        X, y\\n\",\n       \"    )\\n\",\n       \"    yield\\n\",\n       \"    # Clean up the ML models and release the resources\\n\",\n       \"    ml_models.clear()\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"iris_input_data_schema = {\\n\",\n       \"    \\\"type\\\": \\\"record\\\",\\n\",\n       \"    \\\"namespace\\\": \\\"IrisInputData\\\",\\n\",\n       \"    \\\"name\\\": \\\"IrisInputData\\\",\\n\",\n       \"    \\\"fields\\\": [\\n\",\n       \"        {\\\"doc\\\": \\\"Sepal length in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"sepal_length\\\"},\\n\",\n       \"        {\\\"doc\\\": \\\"Sepal width in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"sepal_width\\\"},\\n\",\n       \"        {\\\"doc\\\": \\\"Petal length in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"petal_length\\\"},\\n\",\n       \"        {\\\"doc\\\": \\\"Petal width in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"petal_width\\\"},\\n\",\n       \"    ],\\n\",\n       \"}\\n\",\n       \"iris_prediction_schema = {\\n\",\n       \"    \\\"type\\\": \\\"record\\\",\\n\",\n       \"    \\\"namespace\\\": \\\"IrisPrediction\\\",\\n\",\n       \"    \\\"name\\\": \\\"IrisPrediction\\\",\\n\",\n       \"    \\\"fields\\\": [{\\\"doc\\\": \\\"Predicted species\\\", \\\"type\\\": \\\"string\\\", \\\"name\\\": 
\\\"species\\\"}],\\n\",\n       \"}\\n\",\n       \"# Or load schema from avsc files\\n\",\n       \"\\n\",\n       \"from fastkafka.encoder import avsc_to_pydantic\\n\",\n       \"\\n\",\n       \"IrisInputData = avsc_to_pydantic(iris_input_data_schema)\\n\",\n       \"IrisPrediction = avsc_to_pydantic(iris_prediction_schema)\\n\",\n       \"\\n\",\n       \"    \\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"    },\\n\",\n       \"    \\\"production\\\": {\\n\",\n       \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n       \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n       \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Iris predictions\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \"    lifespan=lifespan,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"@kafka_app.consumes(topic=\\\"input_data\\\", decoder=\\\"avro\\\")\\n\",\n       \"async def on_input_data(msg: IrisInputData):\\n\",\n       \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n       \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n       \"    )[0]\\n\",\n       \"\\n\",\n       \"    await to_predictions(species_class)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.produces(topic=\\\"predictions\\\", encoder=\\\"avro\\\")\\n\",\n       \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n       \"    iris_species 
= [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n       \"\\n\",\n       \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n       \"    return prediction\\n\",\n       \"\\n\",\n       \"```\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"kafka_app_source = \\\"\\\"\\\"\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"iris_input_data_schema = {\\n\",\n    \"    \\\"type\\\": \\\"record\\\",\\n\",\n    \"    \\\"namespace\\\": \\\"IrisInputData\\\",\\n\",\n    \"    \\\"name\\\": \\\"IrisInputData\\\",\\n\",\n    \"    \\\"fields\\\": [\\n\",\n    \"        {\\\"doc\\\": \\\"Sepal length in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"sepal_length\\\"},\\n\",\n    \"        {\\\"doc\\\": \\\"Sepal width in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"sepal_width\\\"},\\n\",\n    \"        {\\\"doc\\\": \\\"Petal length in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"petal_length\\\"},\\n\",\n    \"        
{\\\"doc\\\": \\\"Petal width in cm\\\", \\\"type\\\": \\\"double\\\", \\\"name\\\": \\\"petal_width\\\"},\\n\",\n    \"    ],\\n\",\n    \"}\\n\",\n    \"iris_prediction_schema = {\\n\",\n    \"    \\\"type\\\": \\\"record\\\",\\n\",\n    \"    \\\"namespace\\\": \\\"IrisPrediction\\\",\\n\",\n    \"    \\\"name\\\": \\\"IrisPrediction\\\",\\n\",\n    \"    \\\"fields\\\": [{\\\"doc\\\": \\\"Predicted species\\\", \\\"type\\\": \\\"string\\\", \\\"name\\\": \\\"species\\\"}],\\n\",\n    \"}\\n\",\n    \"# Or load schema from avsc files\\n\",\n    \"\\n\",\n    \"from fastkafka.encoder import avsc_to_pydantic\\n\",\n    \"\\n\",\n    \"IrisInputData = avsc_to_pydantic(iris_input_data_schema)\\n\",\n    \"IrisPrediction = avsc_to_pydantic(iris_prediction_schema)\\n\",\n    \"\\n\",\n    \"    \\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", decoder=\\\"avro\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, 
msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\", encoder=\\\"avro\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Markdown(\\n\",\n    \"    f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file\\n\",\n    \"{kafka_app_source}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9bd7e2c0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-07-05 08:19:27.787 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-07-05 08:19:27.788 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-07-05 08:19:27.799 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-05 08:19:27.800 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-05 08:19:27.809 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-05 08:19:27.810 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-05 08:19:27.810 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-05 08:19:27.811 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-05 08:19:27.811 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-05 08:19:27.811 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-05 08:19:27.812 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-05 08:19:27.812 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\\n\",\n      \"23-07-05 08:19:27.812 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-05 08:19:27.812 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-05 08:19:27.813 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-05 08:19:27.813 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-05 08:19:27.813 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-05 08:19:27.814 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-05 08:19:27.814 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-07-05 08:19:27.814 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): 
Consumer subscribed.\\n\",\n      \"23-07-05 08:19:31.811 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-05 08:19:31.812 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-05 08:19:31.812 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-05 08:19:31.812 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-05 08:19:31.813 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-05 08:19:31.813 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-05 08:19:31.814 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-05 08:19:31.814 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-05 08:19:31.814 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    src_path = Path(d) / \\\"application.py\\\"\\n\",\n    \"    with open(src_path, \\\"w\\\") as source:\\n\",\n    \"        source.write(kafka_app_source)\\n\",\n    \"    with change_dir(d):\\n\",\n    \"        sys.path.insert(0, d)\\n\",\n    \"        from application import kafka_app, IrisInputData, IrisPrediction\\n\",\n    \"\\n\",\n    \"        from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"        msg = IrisInputData(\\n\",\n    \"            sepal_length=0.1,\\n\",\n    \"            sepal_width=0.2,\\n\",\n    \"            petal_length=0.3,\\n\",\n    \"            petal_width=0.4,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        # Start Tester app and create InMemory Kafka broker for 
testing\\n\",\n    \"        async with Tester(kafka_app) as tester:\\n\",\n    \"            # Send IrisInputData message to input_data topic\\n\",\n    \"            await tester.to_input_data(msg)\\n\",\n    \"\\n\",\n    \"            # Assert that the kafka_app responded with IrisPrediction in predictions topic\\n\",\n    \"            await tester.awaited_mocks.on_predictions.assert_awaited_with(\\n\",\n    \"                IrisPrediction(species=\\\"setosa\\\"), timeout=3\\n\",\n    \"            )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"11596544\",\n   \"metadata\": {},\n   \"source\": [\n    \"The above code is a sample implementation of using FastKafka to consume and produce Avro-encoded messages from/to a Kafka topic. The code defines two Avro schemas for the input data and the prediction result. It then uses the `avsc_to_pydantic` function from the FastKafka library to convert the Avro schema into Pydantic models, which will be used to decode and encode Avro messages.\\n\",\n    \"\\n\",\n    \"The `FastKafka` class is then instantiated with the broker details, and two functions decorated with `@kafka_app.consumes` and `@kafka_app.produces` are defined to consume messages from the \\\"input_data\\\" topic and produce messages to the \\\"predictions\\\" topic, respectively. The functions uses the decoder=\\\"avro\\\" and encoder=\\\"avro\\\" parameters to decode and encode the Avro messages.\\n\",\n    \"\\n\",\n    \"In summary, the above code demonstrates a straightforward way to use Avro-encoded messages with FastKafka to build a message processing pipeline.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1c45a7f2\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 3. 
Custom encoder and decoder\\n\",\n    \"\\n\",\n    \"If you are not happy with the json or avro encoder/decoder options, you can write your own encoder/decoder functions and use them to encode/decode Pydantic messages.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"afde8fe2\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Writing a custom encoder and decoder\\n\",\n    \"\\n\",\n    \"In this section, let's see how to write a custom encoder and decoder which obfuscates kafka message with simple [ROT13](https://en.wikipedia.org/wiki/ROT13) cipher.\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"import codecs\\n\",\n    \"import json\\n\",\n    \"from typing import Any, Type\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def custom_encoder(msg: BaseModel) -> bytes:\\n\",\n    \"    msg_str = msg.json()\\n\",\n    \"    obfuscated = codecs.encode(msg_str, 'rot13')\\n\",\n    \"    raw_bytes = obfuscated.encode(\\\"utf-8\\\")\\n\",\n    \"    return raw_bytes\\n\",\n    \"\\n\",\n    \"def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\\n\",\n    \"    obfuscated = raw_msg.decode(\\\"utf-8\\\")\\n\",\n    \"    msg_str = codecs.decode(obfuscated, 'rot13')\\n\",\n    \"    msg_dict = json.loads(msg_str)\\n\",\n    \"    return cls(**msg_dict)\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"The above code defines two custom functions for encoding and decoding messages in a Kafka application using the FastKafka library. \\n\",\n    \"\\n\",\n    \"The encoding function, `custom_encoder()`, takes a message `msg` which is an instance of a Pydantic model, converts it to a JSON string using the `json()` method, obfuscates the resulting string using the ROT13 algorithm from the `codecs` module, and finally encodes the obfuscated string as raw bytes using the UTF-8 encoding. 
\\n\",\n    \"\\n\",\n    \"The decoding function, `custom_decoder()`, takes a raw message `raw_msg` in bytes format, a Pydantic class to construct instance with cls parameter. It first decodes the raw message from UTF-8 encoding, then uses the ROT13 algorithm to de-obfuscate the string. Finally, it loads the resulting JSON string using the `json.loads()` method and returns a new instance of the specified `cls` class initialized with the decoded dictionary. \\n\",\n    \"\\n\",\n    \"These functions can be used with FastKafka's `encoder` and `decoder` parameters to customize the serialization and deserialization of messages in Kafka topics.\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Let's test the above code\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\\n\",\n    \"\\n\",\n    \"encoded = custom_encoder(i)\\n\",\n    \"display(encoded)\\n\",\n    \"\\n\",\n    \"decoded = custom_decoder(encoded, IrisInputData)\\n\",\n    \"display(decoded)\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This will result in following output\\n\",\n    \"\\n\",\n    \"```txt\\n\",\n    \"b'{\\\"frcny_yratgu\\\": 0.5, \\\"frcny_jvqgu\\\": 0.5, \\\"crgny_yratgu\\\": 0.5, \\\"crgny_jvqgu\\\": 0.5}'\\n\",\n    \"\\n\",\n    \"IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"20fe6ff6\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Assembling it all together\\n\",\n    \"\\n\",\n    \"Let's rewrite the sample code found in [tutorial](/docs#running-the-service) to use our custom decoder and encoder functions:\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"215d6e0e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"# content of the 
\\\"application.py\\\" file\\n\",\n       \"\\n\",\n       \"from contextlib import asynccontextmanager\\n\",\n       \"\\n\",\n       \"from sklearn.datasets import load_iris\\n\",\n       \"from sklearn.linear_model import LogisticRegression\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"ml_models = {}\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@asynccontextmanager\\n\",\n       \"async def lifespan(app: FastKafka):\\n\",\n       \"    # Load the ML model\\n\",\n       \"    X, y = load_iris(return_X_y=True)\\n\",\n       \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n       \"        X, y\\n\",\n       \"    )\\n\",\n       \"    yield\\n\",\n       \"    # Clean up the ML models and release the resources\\n\",\n       \"    ml_models.clear()\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n       \"\\n\",\n       \"class IrisInputData(BaseModel):\\n\",\n       \"    sepal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    sepal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_length: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n       \"    )\\n\",\n       \"    petal_width: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"class IrisPrediction(BaseModel):\\n\",\n       \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"import codecs\\n\",\n       \"import json\\n\",\n       \"from 
typing import Any, Type\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"def custom_encoder(msg: BaseModel) -> bytes:\\n\",\n       \"    msg_str = msg.json()\\n\",\n       \"    obfuscated = codecs.encode(msg_str, 'rot13')\\n\",\n       \"    raw_bytes = obfuscated.encode(\\\"utf-8\\\")\\n\",\n       \"    return raw_bytes\\n\",\n       \"\\n\",\n       \"def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\\n\",\n       \"    obfuscated = raw_msg.decode(\\\"utf-8\\\")\\n\",\n       \"    msg_str = codecs.decode(obfuscated, 'rot13')\\n\",\n       \"    msg_dict = json.loads(msg_str)\\n\",\n       \"    return cls(**msg_dict)\\n\",\n       \"\\n\",\n       \"    \\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"    },\\n\",\n       \"    \\\"production\\\": {\\n\",\n       \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n       \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n       \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Iris predictions\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \"    lifespan=lifespan,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"@kafka_app.consumes(topic=\\\"input_data\\\", decoder=custom_decoder)\\n\",\n       \"async def on_input_data(msg: IrisInputData):\\n\",\n       \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n       \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    
   \"    )[0]\\n\",\n       \"\\n\",\n       \"    await to_predictions(species_class)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.produces(topic=\\\"predictions\\\", encoder=custom_encoder)\\n\",\n       \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n       \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n       \"\\n\",\n       \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n       \"    return prediction\\n\",\n       \"\\n\",\n       \"```\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"kafka_app_source = \\\"\\\"\\\"\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel, NonNegativeFloat, Field\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat 
= Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"import codecs\\n\",\n    \"import json\\n\",\n    \"from typing import Any, Type\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def custom_encoder(msg: BaseModel) -> bytes:\\n\",\n    \"    msg_str = msg.json()\\n\",\n    \"    obfuscated = codecs.encode(msg_str, 'rot13')\\n\",\n    \"    raw_bytes = obfuscated.encode(\\\"utf-8\\\")\\n\",\n    \"    return raw_bytes\\n\",\n    \"\\n\",\n    \"def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\\n\",\n    \"    obfuscated = raw_msg.decode(\\\"utf-8\\\")\\n\",\n    \"    msg_str = codecs.decode(obfuscated, 'rot13')\\n\",\n    \"    msg_dict = json.loads(msg_str)\\n\",\n    \"    return cls(**msg_dict)\\n\",\n    \"\\n\",\n    \"    \\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": 
\\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", decoder=custom_decoder)\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\", encoder=custom_encoder)\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Markdown(\\n\",\n    \"    f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file\\n\",\n    \"{kafka_app_source}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"94623894\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-07-05 08:19:31.838 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-07-05 08:19:31.838 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-07-05 08:19:31.853 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-05 08:19:31.853 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-05 08:19:31.867 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-05 08:19:31.867 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-05 08:19:31.868 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-05 08:19:31.868 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-05 08:19:31.869 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-05 08:19:31.869 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-05 08:19:31.869 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-05 08:19:31.869 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\\n\",\n      \"23-07-05 08:19:31.870 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-05 08:19:31.870 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-05 08:19:31.870 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-05 08:19:31.870 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer 
patched start() called()\\n\",\n      \"23-07-05 08:19:31.871 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-05 08:19:31.871 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-05 08:19:31.871 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-07-05 08:19:31.871 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-05 08:19:35.868 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-05 08:19:35.869 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-05 08:19:35.869 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-05 08:19:35.870 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-05 08:19:35.870 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-05 08:19:35.870 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-05 08:19:35.870 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-05 08:19:35.871 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-05 08:19:35.871 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with TemporaryDirectory() as d:\\n\",\n    \"    src_path = Path(d) / \\\"application.py\\\"\\n\",\n    \"    with open(src_path, \\\"w\\\") as source:\\n\",\n    \"        source.write(kafka_app_source)\\n\",\n    \"    with 
change_dir(d):\\n\",\n    \"        sys.path.insert(0, d)\\n\",\n    \"        from application import kafka_app, IrisInputData, IrisPrediction\\n\",\n    \"\\n\",\n    \"        from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"        msg = IrisInputData(\\n\",\n    \"            sepal_length=0.1,\\n\",\n    \"            sepal_width=0.2,\\n\",\n    \"            petal_length=0.3,\\n\",\n    \"            petal_width=0.4,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        # Start Tester app and create InMemory Kafka broker for testing\\n\",\n    \"        async with Tester(kafka_app) as tester:\\n\",\n    \"            # Send IrisInputData message to input_data topic\\n\",\n    \"            await tester.to_input_data(msg)\\n\",\n    \"\\n\",\n    \"            # Assert that the kafka_app responded with IrisPrediction in predictions topic\\n\",\n    \"            await tester.awaited_mocks.on_predictions.assert_awaited_with(\\n\",\n    \"                IrisPrediction(species=\\\"setosa\\\"), timeout=3\\n\",\n    \"            )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"07d7dfde\",\n   \"metadata\": {},\n   \"source\": [\n    \"This code defines a custom encoder and decoder functions for encoding and decoding messages sent through a Kafka messaging system. \\n\",\n    \"\\n\",\n    \"The custom `encoder` function takes a message represented as a `BaseModel` and encodes it as bytes by first converting it to a JSON string and then obfuscating it using the ROT13 encoding. The obfuscated message is then converted to bytes using UTF-8 encoding and returned.\\n\",\n    \"\\n\",\n    \"The custom `decoder` function takes in the bytes representing an obfuscated message, decodes it using UTF-8 encoding, then decodes the ROT13 obfuscation, and finally loads it as a dictionary using the `json` module. 
This dictionary is then converted to a `BaseModel` instance using the `cls` parameter.\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n
  },
  {
    "path": "nbs/guides/Guide_11_Consumes_Basics.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"84b22f68\",\n   \"metadata\": {},\n   \"source\": [\n    \"# @consumes basics\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a35b05c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import platform\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import run_and_match\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8c655e4f\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can use `@consumes` decorator to consume messages from Kafka topics. \\n\",\n    \"\\n\",\n    \"In this guide we will create a simple FastKafka app that will consume `HelloWorld` messages from hello_world topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"18535f2f\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Import `FastKafka`\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b4ef969d\",\n   \"metadata\": {},\n   \"source\": [\n    \"To use the `@consumes` decorator, first we need to import the base FastKafka app to create our application.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3d6a8fae\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": 
\"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_fastkafka = \\\"\\\"\\\"from fastkafka import FastKafka\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{import_fastkafka}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c2e9cb9f\",\n   \"metadata\": {},\n   \"source\": [\n    \"In this demo we will log the messages to the output so that we can inspect and verify that our app is consuming properly. For that we need to import the logger.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2a27e68b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka._components.logger import get_logger\\n\",\n       \"\\n\",\n       \"logger = get_logger(__name__)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_logger = \\\"\\\"\\\"from fastkafka._components.logger import get_logger\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{import_logger}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"09cb37e9\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Define the structure of the messages\\n\",\n    \"Next, you need to define the structure of the messages you want to consume from the topic using [pydantic](https://docs.pydantic.dev/). 
For the guide we'll stick to something basic, but you are free to define any complex message structure you wish in your project, just make sure it can be JSON encoded.\\n\",\n    \"\\n\",\n    \"Let's import `BaseModel` and `Field` from pydantic and create a simple `HelloWorld` class containing one string parameter `msg`\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f83265a1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_pydantic = \\\"\\\"\\\"from pydantic import BaseModel, Field\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{import_pydantic}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9c1e6ec8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"define_HelloWorld = \\\"\\\"\\\"class HelloWorld(BaseModel):\\n\",\n    \"    msg: str = Field(\\n\",\n    \"        
...,\\n\",\n    \"        example=\\\"Hello\\\",\\n\",\n    \"        description=\\\"Demo hello world message\\\",\\n\",\n    \"    )\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{define_HelloWorld}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fc1d810c\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Create a base FastKafka app\\n\",\n    \"\\n\",\n    \"Now we will create and define a base FastKafka app, replace the `<url_of_your_kafka_bootstrap_server>` and `<port_of_your_kafka_bootstrap_server>` with the actual values of your Kafka bootstrap server\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2732642f\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"create_app = \\\"\\\"\\\"kafka_brokers = {\\n\",\n    \"    \\\"demo_broker\\\": {\\n\",\n    \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n    \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"app = 
FastKafka(kafka_brokers=kafka_brokers)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{create_app}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"6e41ebe6\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Create a consumer function and decorate it with `@consumes`\\n\",\n    \"\\n\",\n    \"Let's create a consumer function that will consume `HelloWorld` messages from *hello_world* topic and log them.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"93dd7a11\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"@app.consumes()\\n\",\n       \"async def on_hello_world(msg: HelloWorld):\\n\",\n       \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"decorate_consumes = \\\"\\\"\\\"@app.consumes()\\n\",\n    \"async def on_hello_world(msg: HelloWorld):\\n\",\n    \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{decorate_consumes}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9bdf5198\",\n   \"metadata\": {},\n   \"source\": [\n    \"The function decorated with the `@consumes` decorator will be called when a message is produced to Kafka.\\n\",\n    \"\\n\",\n    \"The message will then be injected into the typed *msg* argument of the function and its type will be used to parse the message.\\n\",\n    \"\\n\",\n    \"In this example case, when the message is sent into a *hello_world* topic, it will be parsed into a HelloWorld class and `on_hello_world` function will be called with the parsed class as 
*msg* argument value.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b68c95f7\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Final app\\n\",\n    \"\\n\",\n    \"Your app code should look like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cd7d88d3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"from fastkafka._components.logger import get_logger\\n\",\n       \"\\n\",\n       \"logger = get_logger(__name__)\\n\",\n       \"\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"@app.consumes()\\n\",\n       \"async def on_hello_world(msg: HelloWorld):\\n\",\n       \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"consumes_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + 
import_logger\\n\",\n    \"    + \\\"\\\\n\\\"\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + \\\"\\\\n\\\"\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_consumes\\n\",\n    \")\\n\",\n    \"md(f\\\"```python\\\\n{consumes_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8fa74215\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Run the app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"afe529ab\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"Now we can run the app. Copy the code above in consumer_example.py and run it by running\\n\",\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_file = \\\"consumer_example.py\\\"\\n\",\n    \"filename = script_file.split(\\\".py\\\")[0]\\n\",\n    \"cmd = f\\\"fastkafka run --num-workers=1 --kafka-broker=demo_broker {filename}:app\\\"\\n\",\n    \"md(\\n\",\n    \"    f\\\"Now we can run the app. 
Copy the code above in {script_file} and run it by running\\\\n```shell\\\\n{cmd}\\\\n```\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e66be2b9\",\n   \"metadata\": {},\n   \"source\": [\n    \"After running the command, you should see this output in your terminal:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"335ec62d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-15 07:15:54.167 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-15 07:15:54.168 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-15 07:15:54.169 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-15 07:15:54.170 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-15 07:15:54.170 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-15 07:15:54.171 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-15 07:15:54.171 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-15 07:15:54.172 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-06-15 07:15:55.001 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-15 07:15:57.462 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"23-06-15 07:15:59.432 [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"23-06-15 
07:15:59.432 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-15 07:16:05.941 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-15 07:16:05.942 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 13679...\\n\",\n      \"23-06-15 07:16:07.588 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 13679 terminated.\\n\",\n      \"23-06-15 07:16:07.588 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 13319...\\n\",\n      \"23-06-15 07:16:08.935 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 13319 terminated.\\n\",\n      \"23-06-15 07:16:08.940 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=11092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=consumes_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, exit_code\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e35f6e21\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n    
 \"output_type\": \"stream\",\n     \"text\": [\n      \"[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\\n\",\n      \"[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\\n\",\n      \"[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\\n\",\n      \"[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[14442]: 23-06-15 07:16:00.585 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"23-06-15 07:16:04.626 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14442...\\n\",\n      \"[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-15 07:16:05.853 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14442 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d4ec6dab\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Send the message to kafka topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c81e65bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"Lets send a `HelloWorld` message to the *hello_world* topic and check if our consumer kafka application has logged the received message. 
In your terminal, run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6ef181f6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"echo { \\\\\\\"msg\\\\\\\": \\\\\\\"Hello world\\\\\\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_extension = \\\".bat\\\" if platform.system() == \\\"Windows\\\" else \\\".sh\\\"\\n\",\n    \"escape_char = \\\"^\\\" if platform.system() == \\\"Windows\\\" else \\\"\\\\\\\\\\\"\\n\",\n    \"\\n\",\n    \"kafka_msg = 'echo {{ {escape_char}\\\"msg{escape_char}\\\": {escape_char}\\\"Hello world{escape_char}\\\" }}'.format(escape_char=escape_char)\\n\",\n    \"\\n\",\n    \"producer_cmd = f'{kafka_msg} | kafka-console-producer'+script_extension+' --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>'\\n\",\n    \"md(f\\\"```shell\\\\n{producer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a66904c8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-15 07:16:08.976 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-15 07:16:08.977 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-15 07:16:08.978 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling 
nest_asyncio.apply()\\n\",\n      \"23-06-15 07:16:08.979 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-15 07:16:08.980 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-15 07:16:08.981 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-06-15 07:16:09.846 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-15 07:16:12.214 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"23-06-15 07:16:14.113 [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"23-06-15 07:16:14.115 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-15 07:16:35.563 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-15 07:16:35.563 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14824...\\n\",\n      \"23-06-15 07:16:37.212 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14824 terminated.\\n\",\n      \"23-06-15 07:16:37.212 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14462...\\n\",\n      \"23-06-15 07:16:38.560 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14462 terminated.\\n\",\n      \"23-06-15 07:16:38.567 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"consumes_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + import_logger\\n\",\n    \"    + \\\"\\\\n\\\"\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + \\\"\\\\n\\\"\\n\",\n    \"    
+ create_app\\n\",\n    \"    + decorate_consumes\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=11092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    async with asyncer.create_task_group() as task_group:\\n\",\n    \"        server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"        server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"        consumer_task = task_group.soonify(run_script_and_cancel)(\\n\",\n    \"            script=consumes_example.replace(\\n\",\n    \"                \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"            ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"            script_file=script_file,\\n\",\n    \"            cmd=cmd,\\n\",\n    \"            cancel_after=20,\\n\",\n    \"        )\\n\",\n    \"        await asyncio.sleep(10)\\n\",\n    \"\\n\",\n    \"        producer_task = task_group.soonify(asyncio.create_subprocess_shell)(\\n\",\n    \"            cmd=producer_cmd.replace(\\n\",\n    \"                \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"            ),\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"assert \\\"Got msg: msg='Hello world'\\\" in consumer_task.value[1].decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e86e202e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\\n\",\n      \"[15588]: 23-06-15 07:16:15.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[15588]: 23-06-15 07:16:15.294 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\\n\",\n      \"[15588]: 23-06-15 07:16:15.295 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\\n\",\n      \"[15588]: 23-06-15 07:16:15.295 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[15588]: 23-06-15 07:16:15.302 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \\n\",\n      \"[15588]: 23-06-15 07:16:25.867 [INFO] consumer_example: Got msg: msg='Hello world'\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"23-06-15 07:16:34.168 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 15588...\\n\",\n      \"[15588]: 23-06-15 07:16:35.358 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[15588]: 23-06-15 07:16:35.359 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-15 07:16:35.475 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 15588 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(consumer_task.value[1].decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9292901c\",\n   \"metadata\": {},\n   \"source\": [\n    \"You should see the \\\"Got msg: msg=\\\\'Hello world\\\\'\\\" being logged by your consumer.\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"markdown\",\n   \"id\": \"49b9c46e\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Choosing a topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"ed9e5f0c\",\n   \"metadata\": {},\n   \"source\": [\n    \"You probably noticed that you didn't define which topic you are receiving the message from, this is because the `@consumes` decorator determines the topic by default from your function name.\\n\",\n    \"The decorator will take your function name and strip the default \\\"on_\\\" prefix from it and use the rest as the topic name. In this example case, the topic is *hello_world*.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"dc3e9f8a\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can choose your custom prefix by defining the `prefix` parameter in consumes decorator, like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"da734ca4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"@app.consumes(prefix=\\\"read_from_\\\")\\n\",\n       \"async def read_from_hello_world(msg: HelloWorld):\\n\",\n       \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: False\\n\",\n    \"\\n\",\n    \"decorate_consumes_prefix = \\\"\\\"\\\"@app.consumes(prefix=\\\"read_from_\\\")\\n\",\n    \"async def read_from_hello_world(msg: HelloWorld):\\n\",\n    \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{decorate_consumes_prefix}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b7563b45\",\n   \"metadata\": {},\n   
\"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-15 07:17:44.438 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-15 07:17:44.439 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-15 07:17:44.440 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-15 07:17:44.440 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-15 07:17:44.441 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-15 07:17:44.442 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-06-15 07:17:45.272 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-15 07:17:47.608 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"23-06-15 07:17:49.517 [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"23-06-15 07:17:49.518 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-15 07:18:10.834 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-15 07:18:10.834 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 16312...\\n\",\n      \"23-06-15 07:18:12.489 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 16312 terminated.\\n\",\n      \"23-06-15 07:18:12.490 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 15951...\\n\",\n      \"23-06-15 07:18:13.836 [INFO] 
fastkafka._components._subprocess: terminate_asyncio_process(): Process 15951 terminated.\\n\",\n      \"23-06-15 07:18:13.845 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"consumes_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + import_logger\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_consumes_prefix\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=11092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    async with asyncer.create_task_group() as task_group:\\n\",\n    \"        server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"        server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"        consumer_task = task_group.soonify(run_script_and_cancel)(\\n\",\n    \"            script=consumes_example.replace(\\n\",\n    \"                \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"            ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"            script_file=script_file,\\n\",\n    \"            cmd=cmd,\\n\",\n    \"            cancel_after=20,\\n\",\n    \"        )\\n\",\n    \"        await asyncio.sleep(10)\\n\",\n    \"\\n\",\n    \"        producer_task = task_group.soonify(asyncio.create_subprocess_shell)(\\n\",\n    \"            cmd=producer_cmd.replace(\\n\",\n    \"                \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"            ),\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"assert \\\"Got msg: msg='Hello world'\\\" in consumer_task.value[1].decode(\\\"UTF-8\\\")\"\n 
  ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0c4225dd\",\n   \"metadata\": {},\n   \"source\": [\n    \"Also, you can define the topic name completely by defining the `topic` in parameter in consumes decorator, like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9bf881b6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"@app.consumes(topic=\\\"my_special_topic\\\")\\n\",\n       \"async def on_hello_world(msg: HelloWorld):\\n\",\n       \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"decorate_consumes_topic = \\\"\\\"\\\"@app.consumes(topic=\\\"my_special_topic\\\")\\n\",\n    \"async def on_hello_world(msg: HelloWorld):\\n\",\n    \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{decorate_consumes_topic}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"612baa8c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-15 07:18:13.874 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-15 07:18:13.874 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-15 07:18:13.875 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-15 07:18:13.877 [INFO] 
fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-15 07:18:13.878 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-15 07:18:13.879 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-06-15 07:18:14.717 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-15 07:18:17.045 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"23-06-15 07:18:18.971 [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"23-06-15 07:18:18.972 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-15 07:18:40.425 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-15 07:18:40.426 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 17799...\\n\",\n      \"23-06-15 07:18:42.046 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 17799 terminated.\\n\",\n      \"23-06-15 07:18:42.046 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 17438...\\n\",\n      \"23-06-15 07:18:43.396 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 17438 terminated.\\n\",\n      \"23-06-15 07:18:43.401 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"consumes_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + import_logger\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_consumes_topic\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n  
  \"    topics=[\\\"my_special_topic\\\"], apply_nest_asyncio=True, listener_port=11092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    async with asyncer.create_task_group() as task_group:\\n\",\n    \"        server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"        server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"        consumer_task = task_group.soonify(run_script_and_cancel)(\\n\",\n    \"            script=consumes_example.replace(\\n\",\n    \"                \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"            ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"            script_file=script_file,\\n\",\n    \"            cmd=cmd,\\n\",\n    \"            cancel_after=20,\\n\",\n    \"        )\\n\",\n    \"        await asyncio.sleep(10)\\n\",\n    \"\\n\",\n    \"        producer_task = task_group.soonify(asyncio.create_subprocess_shell)(\\n\",\n    \"            cmd=producer_cmd.replace(\\n\",\n    \"                \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"            ).replace(\\\"topic=hello_world\\\", \\\"topic=my_special_topic\\\"),\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"assert \\\"Got msg: msg='Hello world'\\\" in consumer_task.value[1].decode(\\n\",\n    \"    \\\"UTF-8\\\"\\n\",\n    \"), consumer_task.value[1].decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"da50972c\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Message data\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"7e4e8052\",\n   \"metadata\": {},\n   \"source\": [\n    \"The message received from kafka is translated from binary JSON representation int the class defined by typing of *msg* parameter in the function decorated by the `@consumes` decorator.\\n\",\n    \"\\n\",\n 
   \"In this example case, the message will be parsed into a `HelloWorld` class.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8763a137\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Message metadata\\n\",\n    \"\\n\",\n    \"If you need any of Kafka message metadata such as timestamp, partition or headers you can access the metadata by adding a EventMetadata typed argument to your consumes function and the metadata from the incoming message will be automatically injected when calling the consumes function.\\n\",\n    \"\\n\",\n    \"Let's demonstrate that.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"cbde9875\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Create a consumer function with metadata\\n\",\n    \"\\n\",\n    \"The only difference from the original basic consume function is that we are now passing the `meta: EventMetadata` argument to the function. The `@consumes` decorator will register that and, when a message is consumed, it will also pass the metadata to your function. Now you can use the metadata in your consume function. 
\\n\",\n    \"Lets log it to see what it contains.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"206a4c79\",\n   \"metadata\": {},\n   \"source\": [\n    \"First, we need to import the EventMetadata\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9bc607d9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import EventMetadata\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_event_metadata = \\\"\\\"\\\"from fastkafka import EventMetadata\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{import_event_metadata}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"006804ea\",\n   \"metadata\": {},\n   \"source\": [\n    \"Now we can add the `meta` argument to our consuming function.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e3d029e0\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"@app.consumes()\\n\",\n       \"async def on_hello_world(msg: HelloWorld, meta: EventMetadata):\\n\",\n       \"    logger.info(f\\\"Got metadata: {meta}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"decorate_consumes_meta = \\\"\\\"\\\"@app.consumes()\\n\",\n    \"async def on_hello_world(msg: HelloWorld, 
meta: EventMetadata):\\n\",\n    \"    logger.info(f\\\"Got metadata: {meta}\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{decorate_consumes_meta}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d9df2e13\",\n   \"metadata\": {},\n   \"source\": [\n    \"Your final app should look like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"90b842f7\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"from fastkafka import EventMetadata\\n\",\n       \"from fastkafka._components.logger import get_logger\\n\",\n       \"\\n\",\n       \"logger = get_logger(__name__)\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"@app.consumes()\\n\",\n       \"async def on_hello_world(msg: HelloWorld, meta: EventMetadata):\\n\",\n       \"    logger.info(f\\\"Got metadata: {meta}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | 
echo: false\\n\",\n    \"\\n\",\n    \"consumes_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + import_event_metadata\\n\",\n    \"    + import_logger\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_consumes_meta\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{consumes_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9143036a\",\n   \"metadata\": {},\n   \"source\": [\n    \"Now lets run the app and send a message to the broker to see the logged message metadata.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a6d637ad\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-15 07:18:49.398 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-15 07:18:49.399 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-15 07:18:49.400 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-15 07:18:49.401 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-15 07:18:49.401 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-15 07:18:49.402 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-06-15 07:18:50.243 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-15 07:18:52.575 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"23-06-15 07:18:54.527 [INFO] fastkafka._testing.apache_kafka_broker: <class 
'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"23-06-15 07:18:54.528 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-15 07:19:15.835 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-15 07:19:15.836 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 19287...\\n\",\n      \"23-06-15 07:19:17.454 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 19287 terminated.\\n\",\n      \"23-06-15 07:19:17.455 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 18925...\\n\",\n      \"23-06-15 07:19:18.802 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 18925 terminated.\\n\",\n      \"23-06-15 07:19:18.810 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=11092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    async with asyncer.create_task_group() as task_group:\\n\",\n    \"        server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"        server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"        consumer_task = task_group.soonify(run_script_and_cancel)(\\n\",\n    \"            script=consumes_example.replace(\\n\",\n    \"                \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"            ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"            script_file=script_file,\\n\",\n    \"            cmd=cmd,\\n\",\n    \"            cancel_after=20,\\n\",\n    \"        )\\n\",\n    \"        await asyncio.sleep(10)\\n\",\n    \"\\n\",\n    \"        
producer_task = task_group.soonify(asyncio.create_subprocess_shell)(\\n\",\n    \"            cmd=producer_cmd.replace(\\n\",\n    \"                \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"            ),\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"assert \\\"Got metadata: \\\" in consumer_task.value[1].decode(\\n\",\n    \"    \\\"UTF-8\\\"\\n\",\n    \"), consumer_task.value[1].decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"74082867\",\n   \"metadata\": {},\n   \"source\": [\n    \"You should see a similar log as the one below and the metadata being logged in your app.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"4f4e6d4c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\\n\",\n      \"[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\\n\",\n      \"[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\\n\",\n      \"[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[20050]: 
23-06-15 07:18:55.682 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \\n\",\n      \"[20050]: 23-06-15 07:19:06.337 [INFO] consumer_example: Got metadata: EventMetadata(topic='hello_world', partition=0, offset=0, timestamp=1686813546255, timestamp_type=0, key=None, value=b'{ \\\"msg\\\": \\\"Hello world\\\" }', checksum=None, serialized_key_size=-1, serialized_value_size=24, headers=())\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"23-06-15 07:19:14.547 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 20050...\\n\",\n      \"[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-15 07:19:15.742 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20050 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(consumer_task.value[1].decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"5252b37a\",\n   \"metadata\": {},\n   \"source\": [\n    \"As you can see in the log, from the metadata you now have the information about the partition, offset, timestamp, key and headers. :tada:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"46bb2a38\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Dealing with high latency consuming functions\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2441facc\",\n   \"metadata\": {},\n   \"source\": [\n    \"If your functions have high latency due to, for example, lengthy database calls you will notice a big decrease in performance. 
This is due to the issue of how the consumes decorator executes your consume functions when consuming events. By default, the consume function will run the consuming funtions for one topic sequentially, this is the most straightforward approach and results with the least amount of overhead.\\n\",\n    \"\\n\",\n    \"But, to handle those high latency tasks and run them in parallel, FastKafka has a `DynamicTaskExecutor` prepared for your consumers. This executor comes with additional overhead, so use it only when you need to handle high latency functions.\\n\",\n    \"\\n\",\n    \"Lets demonstrate how to use it.\\n\",\n    \"\\n\",\n    \"To your consumes decorator, add an `executor` option and set it to `\\\"DynamicTaskExecutor\\\"`, this will enable the consumer to handle high latency functions effectively.\\n\",\n    \"\\n\",\n    \"Your consuming function should now look like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"90e6743b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"@app.consumes(executor=\\\"DynamicTaskExecutor\\\")\\n\",\n       \"async def on_hello_world(msg: HelloWorld):\\n\",\n       \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"decorate_consumes_executor = \\\"\\\"\\\"@app.consumes(executor=\\\"DynamicTaskExecutor\\\")\\n\",\n    \"async def on_hello_world(msg: HelloWorld):\\n\",\n    \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{decorate_consumes_executor}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d0435fcf\",\n   
\"metadata\": {},\n   \"source\": [\n    \"And the complete app should now look like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d01ad105\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"from fastkafka._components.logger import get_logger\\n\",\n       \"\\n\",\n       \"logger = get_logger(__name__)\\n\",\n       \"\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"@app.consumes(executor=\\\"DynamicTaskExecutor\\\")\\n\",\n       \"async def on_hello_world(msg: HelloWorld):\\n\",\n       \"    logger.info(f\\\"Got msg: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"consumes_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + import_logger\\n\",\n    \"    + \\\"\\\\n\\\"\\n\",\n    \"    + define_HelloWorld\\n\",\n    
\"    + \\\"\\\\n\\\"\\n\",\n    \"    + create_app\\n\",\n    \"    + \\\"\\\\n\\\"\\n\",\n    \"    + decorate_consumes_executor\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{consumes_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4293d59e\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can now run your app using the CLI commands described in this guide.\\n\",\n    \"\\n\",\n    \"Lets send a `HelloWorld` message to the *hello_world* topic and check if our consumer kafka application has logged the received message. In your terminal, run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fedc1e5c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"echo { \\\\\\\"msg\\\\\\\": \\\\\\\"Hello world\\\\\\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_extension = \\\".bat\\\" if platform.system() == \\\"Windows\\\" else \\\".sh\\\"\\n\",\n    \"escape_char = \\\"^\\\" if platform.system() == \\\"Windows\\\" else \\\"\\\\\\\\\\\"\\n\",\n    \"\\n\",\n    \"kafka_msg = 'echo {{ {escape_char}\\\"msg{escape_char}\\\": {escape_char}\\\"Hello world{escape_char}\\\" }}'.format(escape_char=escape_char)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"producer_cmd = f'{kafka_msg} | kafka-console-producer'+script_extension+' --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>'\\n\",\n    \"md(f\\\"```shell\\\\n{producer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": null,\n   \"id\": \"53c47569\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-15 07:19:18.845 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-15 07:19:18.847 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-15 07:19:18.847 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-15 07:19:18.848 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-15 07:19:18.849 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-15 07:19:18.849 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-06-15 07:19:19.677 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-15 07:19:22.048 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"23-06-15 07:19:23.954 [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"23-06-15 07:19:23.955 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-15 07:19:45.396 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-15 07:19:45.397 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 20774...\\n\",\n      \"23-06-15 07:19:47.050 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20774 terminated.\\n\",\n      \"23-06-15 07:19:47.050 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): 
Terminating the process 20413...\\n\",\n      \"23-06-15 07:19:48.397 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20413 terminated.\\n\",\n      \"23-06-15 07:19:48.404 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"consumes_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + import_logger\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_consumes_executor\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=11092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    async with asyncer.create_task_group() as task_group:\\n\",\n    \"        server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"        server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"        consumer_task = task_group.soonify(run_script_and_cancel)(\\n\",\n    \"            script=consumes_example.replace(\\n\",\n    \"                \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"            ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"            script_file=script_file,\\n\",\n    \"            cmd=cmd,\\n\",\n    \"            cancel_after=20,\\n\",\n    \"        )\\n\",\n    \"        await asyncio.sleep(10)\\n\",\n    \"\\n\",\n    \"        producer_task = task_group.soonify(asyncio.create_subprocess_shell)(\\n\",\n    \"            cmd=producer_cmd.replace(\\n\",\n    \"                \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"            ),\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"assert \\\"Got 
msg: msg='Hello world'\\\" in consumer_task.value[1].decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e8647faa\",\n   \"metadata\": {},\n   \"source\": [\n    \"You should see a similar log as the one below.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"aea11db6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\\n\",\n      \"[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\\n\",\n      \"[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\\n\",\n      \"[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[21539]: 23-06-15 07:19:25.154 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\\n\",\n      \"[21539]: 23-06-15 07:19:35.512 [INFO] consumer_example: Got msg: msg='Hello world'\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"23-06-15 07:19:44.023 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 21539...\\n\",\n      \"[21539]: 23-06-15 07:19:45.202 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[21539]: 23-06-15 07:19:45.203 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-15 07:19:45.313 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 21539 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(consumer_task.value[1].decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9572ebf3\",\n   \"metadata\": {},\n   \"source\": [\n    \"Inside the log, you should see the \\\"Got msg: msg=\\\\'Hello world\\\\'\\\" being logged by your consumer.\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_12_Batch_Consuming.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"84b22f68\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Batch consuming\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a35b05c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"import platform\\n\",\n    \"\\n\",\n    \"import asyncer\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import run_and_match\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8c655e4f\",\n   \"metadata\": {},\n   \"source\": [\n    \"If you want to consume data in batches `@consumes` decorator makes that possible for you. By typing a consumed msg object as a `list` of messages the consumer will call your consuming function with a batch of messages consumed from a single partition. Let's demonstrate that now.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"18535f2f\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Consume function with batching\\n\",\n    \"\\n\",\n    \"To consume messages in batches, you need to wrap you message type into a list and the `@consumes` decorator will take care of the rest for you. 
Your consumes function will be called with batches grouped by partition now.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"d09190cd\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"@app.consumes(auto_offset_reset=\\\"earliest\\\")\\n\",\n       \"async def on_hello_world(msg: List[HelloWorld]):\\n\",\n       \"    logger.info(f\\\"Got msg batch: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"consumes_decorator_batch = \\\"\\\"\\\"@app.consumes(auto_offset_reset=\\\"earliest\\\")\\n\",\n    \"async def on_hello_world(msg: List[HelloWorld]):\\n\",\n    \"    logger.info(f\\\"Got msg batch: {msg}\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{consumes_decorator_batch}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f66b68d7\",\n   \"metadata\": {},\n   \"source\": [\n    \"## App example\\n\",\n    \"\\n\",\n    \"We will modify the app example from [@consumes basics](/docs/guides/Guide_11_Consumes_Basics.md) guide to consume `HelloWorld` messages batch. 
The final app will look like this (make sure you replace the `<url_of_your_kafka_bootstrap_server>` and `<port_of_your_kafka_bootstrap_server>` with the actual values):\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"673c7f8a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"app = \\\"\\\"\\\"\\n\",\n    \"import asyncio\\n\",\n    \"from typing import List\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"\\n\",\n    \"class HelloWorld(BaseModel):\\n\",\n    \"    msg: str = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"Hello\\\",\\n\",\n    \"        description=\\\"Demo hello world message\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"demo_broker\\\": {\\n\",\n    \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n    \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n    \"\\n\",\n    \"\\\"\\\"\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2abb4c3d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"from typing import List\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from fastkafka._components.logger import get_logger\\n\",\n       \"\\n\",\n       \"logger = get_logger(__name__)\\n\",\n       \"\\n\",\n       
\"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"@app.consumes(auto_offset_reset=\\\"earliest\\\")\\n\",\n       \"async def on_hello_world(msg: List[HelloWorld]):\\n\",\n       \"    logger.info(f\\\"Got msg batch: {msg}\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"batch_example = app + consumes_decorator_batch\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{batch_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d4ec6dab\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Send the messages to kafka topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c81e65bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"Let's send a couple of `HelloWorld` messages to the *hello_world* topic and check if our consumer kafka application has logged the received messages batch. 
In your terminal, run the following command at least two times to create multiple messages in your kafka queue:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6ef181f6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"echo { ^\\\"msg^\\\": ^\\\"Hello world^\\\" }\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"echo { ^\\\"msg^\\\": ^\\\"Hello world^\\\" } | kafka-console-producer.bat --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_extension = \\\".bat\\\" if platform.system() == \\\"Windows\\\" else \\\".sh\\\"\\n\",\n    \"escape_char = \\\"^\\\" if platform.system() == \\\"Windows\\\" else \\\"\\\\\\\\\\\"\\n\",\n    \"\\n\",\n    \"kafka_msg = 'echo {{ {escape_char}\\\"msg{escape_char}\\\": {escape_char}\\\"Hello world{escape_char}\\\" }}'.format(escape_char=escape_char)\\n\",\n    \"\\n\",\n    \"producer_cmd = f'{kafka_msg} | kafka-console-producer'+script_extension+' --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>'\\n\",\n    \"md(f\\\"```shell\\\\n{producer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"01604778\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"Now we can run the app. 
Copy the code of the example app in consumer_example.py and run it by running\\n\",\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_file = \\\"consumer_example.py\\\"\\n\",\n    \"filename = script_file.split(\\\".py\\\")[0]\\n\",\n    \"cmd = f\\\"fastkafka run --num-workers=1 --kafka-broker=demo_broker {filename}:app\\\"\\n\",\n    \"md(\\n\",\n    \"    f\\\"Now we can run the app. Copy the code of the example app in {script_file} and run it by running\\\\n```shell\\\\n{cmd}\\\\n```\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a66904c8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-02 12:08:26.088 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-02 12:08:26.096 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_WindowsSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-02 12:08:26.096 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-02 12:08:26.096 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-02 12:08:26.104 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-02 12:08:26.104 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-02 12:08:26.112 [INFO] 
fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-02 12:08:26.112 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"\\n\",\n      \"23-06-02 12:08:26.120 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:08:26.120 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50492\\n\",\n      \"\\n\",\n      \"23-06-02 12:08:26.120 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:08:26.128 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50493\\n\",\n      \"\\n\",\n      \"23-06-02 12:08:26.128 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:08:26.128 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50494\\n\",\n      \"\\n\",\n      \"23-06-02 12:08:26.137 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-02 12:08:26.137 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=50495\\n\",\n      \"23-06-02 12:08:26.137 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\"\n     ]\n    },\n    {\n     \"ename\": \"ValueError\",\n     \"evalue\": \"Could not start zookeeper with params: [{'zookeeper_port': 2181}, {'zookeeper_port': '50492'}, {'zookeeper_port': '50493'}, {'zookeeper_port': '50494'}]\",\n     \"output_type\": \"error\",\n     \"traceback\": [\n      \"\\u001b[1;31m---------------------------------------------------------------------------\\u001b[0m\",\n      \"\\u001b[1;31mValueError\\u001b[0m                                Traceback (most recent call last)\",\n      \"Cell \\u001b[1;32mIn[7], line 4\\u001b[0m\\n\\u001b[0;32m      1\\u001b[0m 
\\u001b[38;5;66;03m# | hide\\u001b[39;00m\\n\\u001b[1;32m----> 4\\u001b[0m \\u001b[38;5;28;43;01mwith\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43mApacheKafkaBroker\\u001b[49m\\u001b[43m(\\u001b[49m\\n\\u001b[0;32m      5\\u001b[0m \\u001b[43m    \\u001b[49m\\u001b[43mtopics\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43m[\\u001b[49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[38;5;124;43mhello_world\\u001b[39;49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[43m]\\u001b[49m\\u001b[43m,\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[43mapply_nest_asyncio\\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[38;5;28;43;01mTrue\\u001b[39;49;00m\\n\\u001b[0;32m      6\\u001b[0m \\u001b[43m)\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[38;5;28;43;01mas\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43mbootstrap_server\\u001b[49m\\u001b[43m:\\u001b[49m\\n\\u001b[0;32m      7\\u001b[0m \\u001b[43m    \\u001b[49m\\u001b[38;5;28;43;01masync\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[38;5;28;43;01mwith\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43masyncer\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43mcreate_task_group\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[38;5;28;43;01mas\\u001b[39;49;00m\\u001b[43m \\u001b[49m\\u001b[43mtask_group\\u001b[49m\\u001b[43m:\\u001b[49m\\n\\u001b[0;32m      8\\u001b[0m \\u001b[43m        \\u001b[49m\\u001b[43mserver_url\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[38;5;241;43m=\\u001b[39;49m\\u001b[43m \\u001b[49m\\u001b[43mbootstrap_server\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43msplit\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[38;5;124;43m:\\u001b[39;49m\\u001b[38;5;124;43m\\\"\\u001b[39;49m\\u001b[43m)\\u001b[49m\\u001b[43m[\\u001b[49m\\u001b[38;5;241;43m0\\u001b[39;49m\\u001b[43m]\\u001b[49m\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran 
rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:289\\u001b[0m, in \\u001b[0;36mApacheKafkaBroker.__enter__\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    287\\u001b[0m \\u001b[38;5;28;01mdef\\u001b[39;00m \\u001b[38;5;21m__enter__\\u001b[39m(\\u001b[38;5;28mself\\u001b[39m) \\u001b[38;5;241m-\\u001b[39m\\u001b[38;5;241m>\\u001b[39m \\u001b[38;5;28mstr\\u001b[39m:\\n\\u001b[0;32m    288\\u001b[0m     \\u001b[38;5;66;03m#         ApacheKafkaBroker._check_deps()\\u001b[39;00m\\n\\u001b[1;32m--> 289\\u001b[0m     \\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[38;5;28;43mself\\u001b[39;49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43mstart\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:621\\u001b[0m, in \\u001b[0;36mstart\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    618\\u001b[0m         logger\\u001b[38;5;241m.\\u001b[39merror(msg)\\n\\u001b[0;32m    619\\u001b[0m         \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;167;01mRuntimeError\\u001b[39;00m(msg)\\n\\u001b[1;32m--> 621\\u001b[0m retval \\u001b[38;5;241m=\\u001b[39m \\u001b[43mloop\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43mrun_until_complete\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[38;5;28;43mself\\u001b[39;49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43m_start\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\u001b[43m)\\u001b[49m\\n\\u001b[0;32m    622\\u001b[0m logger\\u001b[38;5;241m.\\u001b[39minfo(\\u001b[38;5;124mf\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00m\\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m\\u001b[38;5;18m__class__\\u001b[39m\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m.start(): returning \\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mretval\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m\\\"\\u001b[39m)\\n\\u001b[0;32m  
  623\\u001b[0m \\u001b[38;5;28;01mreturn\\u001b[39;00m retval\\n\",\n      \"File \\u001b[1;32m~\\\\dev\\\\fastkafka\\\\venv\\\\Lib\\\\site-packages\\\\nest_asyncio.py:90\\u001b[0m, in \\u001b[0;36m_patch_loop.<locals>.run_until_complete\\u001b[1;34m(self, future)\\u001b[0m\\n\\u001b[0;32m     87\\u001b[0m \\u001b[38;5;28;01mif\\u001b[39;00m \\u001b[38;5;129;01mnot\\u001b[39;00m f\\u001b[38;5;241m.\\u001b[39mdone():\\n\\u001b[0;32m     88\\u001b[0m     \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;167;01mRuntimeError\\u001b[39;00m(\\n\\u001b[0;32m     89\\u001b[0m         \\u001b[38;5;124m'\\u001b[39m\\u001b[38;5;124mEvent loop stopped before Future completed.\\u001b[39m\\u001b[38;5;124m'\\u001b[39m)\\n\\u001b[1;32m---> 90\\u001b[0m \\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[43mf\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43mresult\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43m)\\u001b[49m\\n\",\n      \"File \\u001b[1;32m~\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python311\\\\Lib\\\\asyncio\\\\futures.py:203\\u001b[0m, in \\u001b[0;36mFuture.result\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    201\\u001b[0m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m__log_traceback \\u001b[38;5;241m=\\u001b[39m \\u001b[38;5;28;01mFalse\\u001b[39;00m\\n\\u001b[0;32m    202\\u001b[0m \\u001b[38;5;28;01mif\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_exception \\u001b[38;5;129;01mis\\u001b[39;00m \\u001b[38;5;129;01mnot\\u001b[39;00m \\u001b[38;5;28;01mNone\\u001b[39;00m:\\n\\u001b[1;32m--> 203\\u001b[0m     \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_exception\\u001b[38;5;241m.\\u001b[39mwith_traceback(\\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_exception_tb)\\n\\u001b[0;32m    204\\u001b[0m \\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_result\\n\",\n      \"File 
\\u001b[1;32m~\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python311\\\\Lib\\\\asyncio\\\\tasks.py:267\\u001b[0m, in \\u001b[0;36mTask.__step\\u001b[1;34m(***failed resolving arguments***)\\u001b[0m\\n\\u001b[0;32m    263\\u001b[0m \\u001b[38;5;28;01mtry\\u001b[39;00m:\\n\\u001b[0;32m    264\\u001b[0m     \\u001b[38;5;28;01mif\\u001b[39;00m exc \\u001b[38;5;129;01mis\\u001b[39;00m \\u001b[38;5;28;01mNone\\u001b[39;00m:\\n\\u001b[0;32m    265\\u001b[0m         \\u001b[38;5;66;03m# We use the `send` method directly, because coroutines\\u001b[39;00m\\n\\u001b[0;32m    266\\u001b[0m         \\u001b[38;5;66;03m# don't have `__iter__` and `__next__` methods.\\u001b[39;00m\\n\\u001b[1;32m--> 267\\u001b[0m         result \\u001b[38;5;241m=\\u001b[39m \\u001b[43mcoro\\u001b[49m\\u001b[38;5;241;43m.\\u001b[39;49m\\u001b[43msend\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[38;5;28;43;01mNone\\u001b[39;49;00m\\u001b[43m)\\u001b[49m\\n\\u001b[0;32m    268\\u001b[0m     \\u001b[38;5;28;01melse\\u001b[39;00m:\\n\\u001b[0;32m    269\\u001b[0m         result \\u001b[38;5;241m=\\u001b[39m coro\\u001b[38;5;241m.\\u001b[39mthrow(exc)\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:561\\u001b[0m, in \\u001b[0;36m_start\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    558\\u001b[0m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mtemporary_directory \\u001b[38;5;241m=\\u001b[39m TemporaryDirectory()\\n\\u001b[0;32m    559\\u001b[0m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mtemporary_directory_path \\u001b[38;5;241m=\\u001b[39m Path(\\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mtemporary_directory\\u001b[38;5;241m.\\u001b[39m\\u001b[38;5;21m__enter__\\u001b[39m())\\n\\u001b[1;32m--> 561\\u001b[0m \\u001b[38;5;28;01mawait\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_start_zookeeper()\\n\\u001b[0;32m    562\\u001b[0m 
\\u001b[38;5;28;01mawait\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_start_kafka()\\n\\u001b[0;32m    564\\u001b[0m listener_port \\u001b[38;5;241m=\\u001b[39m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39mkafka_kwargs\\u001b[38;5;241m.\\u001b[39mget(\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;124mlistener_port\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m, \\u001b[38;5;241m9092\\u001b[39m)\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:515\\u001b[0m, in \\u001b[0;36m_start_zookeeper\\u001b[1;34m(self)\\u001b[0m\\n\\u001b[0;32m    512\\u001b[0m \\u001b[38;5;129m@patch\\u001b[39m\\n\\u001b[0;32m    513\\u001b[0m \\u001b[38;5;28;01masync\\u001b[39;00m \\u001b[38;5;28;01mdef\\u001b[39;00m \\u001b[38;5;21m_start_zookeeper\\u001b[39m(\\u001b[38;5;28mself\\u001b[39m: ApacheKafkaBroker) \\u001b[38;5;241m-\\u001b[39m\\u001b[38;5;241m>\\u001b[39m \\u001b[38;5;28;01mNone\\u001b[39;00m:\\n\\u001b[0;32m    514\\u001b[0m \\u001b[38;5;250m    \\u001b[39m\\u001b[38;5;124;03m\\\"\\\"\\\"Starts a local ZooKeeper instance asynchronously.\\\"\\\"\\\"\\u001b[39;00m\\n\\u001b[1;32m--> 515\\u001b[0m     \\u001b[38;5;28;01mreturn\\u001b[39;00m \\u001b[38;5;28;01mawait\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m\\u001b[38;5;241m.\\u001b[39m_start_service(\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;124mzookeeper\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m)\\n\",\n      \"File \\u001b[1;32mc:\\\\users\\\\kumaran rajendhiran\\\\dev\\\\fastkafka\\\\fastkafka\\\\_testing\\\\apache_kafka_broker.py:503\\u001b[0m, in \\u001b[0;36m_start_service\\u001b[1;34m(self, service)\\u001b[0m\\n\\u001b[0;32m    500\\u001b[0m         \\u001b[38;5;28msetattr\\u001b[39m(\\u001b[38;5;28mself\\u001b[39m, 
\\u001b[38;5;124mf\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mservice\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m_task\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m, service_task)\\n\\u001b[0;32m    501\\u001b[0m         \\u001b[38;5;28;01mreturn\\u001b[39;00m\\n\\u001b[1;32m--> 503\\u001b[0m \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;167;01mValueError\\u001b[39;00m(\\u001b[38;5;124mf\\u001b[39m\\u001b[38;5;124m\\\"\\u001b[39m\\u001b[38;5;124mCould not start \\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mservice\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m with params: \\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mconfigs_tried\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[38;5;124m\\\"\\u001b[39m)\\n\",\n      \"\\u001b[1;31mValueError\\u001b[0m: Could not start zookeeper with params: [{'zookeeper_port': 2181}, {'zookeeper_port': '50492'}, {'zookeeper_port': '50493'}, {'zookeeper_port': '50494'}]\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=12092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    async with asyncer.create_task_group() as task_group:\\n\",\n    \"        server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"        server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"\\n\",\n    \"        producer_tasks = [task_group.soonify(asyncio.create_subprocess_shell)(\\n\",\n    \"            cmd=producer_cmd.replace(\\n\",\n    \"                \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"            ),\\n\",\n    \"            stdout=asyncio.subprocess.PIPE,\\n\",\n    \"            stderr=asyncio.subprocess.PIPE,\\n\",\n    \"        ) for _ in range(2)]\\n\",\n    \"        \\n\",\n    \"        await asyncio.sleep(5)\\n\",\n    \"        \\n\",\n    \"        consumer_task = 
task_group.soonify(run_script_and_cancel)(\\n\",\n    \"            script=batch_example.replace(\\n\",\n    \"                \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"            ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"            script_file=script_file,\\n\",\n    \"            cmd=cmd,\\n\",\n    \"            cancel_after=20,\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"assert \\\"Got msg batch: [HelloWorld(msg='Hello world'), HelloWorld(msg='Hello world')]\\\" in consumer_task.value[1].decode(\\\"UTF-8\\\"), consumer_task.value[1].decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e86e202e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(consumer_task.value[1].decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9292901c\",\n   \"metadata\": {},\n   \"source\": [\n    \"You should see your Kafka messages being logged in batches by your consumer.\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_21_Produces_Basics.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"84b22f68\",\n   \"metadata\": {},\n   \"source\": [\n    \"# @produces basics\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"7a35b05c\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import platform\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import run_and_match\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8c655e4f\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can use `@produces` decorator to produce messages to Kafka topics. \\n\",\n    \"\\n\",\n    \"In this guide we will create a simple FastKafka app that will produce hello world messages to hello_world topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"18535f2f\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Import `FastKafka`\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b4ef969d\",\n   \"metadata\": {},\n   \"source\": [\n    \"To use the `@produces` decorator, first we need to import the base FastKafka app to create our application.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3d6a8fae\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   
\"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_fastkafka = \\\"\\\"\\\"from fastkafka import FastKafka\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{import_fastkafka}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"09cb37e9\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Define the structure of the messages\\n\",\n    \"Next, you need to define the structure of the messages you want to send to the topic using [pydantic](https://docs.pydantic.dev/). For the guide we'll stick to something basic, but you are free to define any complex message structure you wish in your project, just make sure it can be JSON encoded.\\n\",\n    \"\\n\",\n    \"Let's import `BaseModel` and `Field` from pydantic and create a simple `HelloWorld` class containing one string parameter `msg`\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f83265a1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"import_pydantic = \\\"\\\"\\\"from pydantic import BaseModel, Field\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{import_pydantic}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9c1e6ec8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        
example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"define_HelloWorld = \\\"\\\"\\\"\\n\",\n    \"class HelloWorld(BaseModel):\\n\",\n    \"    msg: str = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"Hello\\\",\\n\",\n    \"        description=\\\"Demo hello world message\\\",\\n\",\n    \"    )\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{define_HelloWorld}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fc1d810c\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Create a base FastKafka app\\n\",\n    \"\\n\",\n    \"Now we will create and define a base FastKafka app, replace the `<url_of_your_kafka_bootstrap_server>` and `<port_of_your_kafka_bootstrap_server>` with the actual values of your Kafka bootstrap server\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2732642f\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown 
object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"create_app = \\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"demo_broker\\\": {\\n\",\n    \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n    \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{create_app}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"6e41ebe6\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Create a producer function and decorate it with `@produces`\\n\",\n    \"\\n\",\n    \"Let's create a producer function that will produce `HelloWorld` messages to *hello_world* topic:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"93dd7a11\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msg: str) -> HelloWorld:\\n\",\n       \"    return HelloWorld(msg=msg)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"decorate_produces = \\\"\\\"\\\"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_hello_world(msg: str) -> HelloWorld:\\n\",\n    \"    return HelloWorld(msg=msg)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    
\"md(f\\\"```python\\\\n{decorate_produces}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4216f0f1\",\n   \"metadata\": {},\n   \"source\": [\n    \"Now you can call your defined function as any normal python function in your code. The side effect of calling the function will be that the value you are returning will also be sent to a kafka topic.\\n\",\n    \"\\n\",\n    \"By default, the topic is determined from your function name, the \\\"to_\\\" prefix is stripped and what is left over is used as a topic name. I  this case, that is *hello_world*.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f02f147e\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Instruct the app to start sending HelloWorld messages\\n\",\n    \"\\n\",\n    \"Let's use `@run_in_background` decorator to instruct our app to send HelloWorld messages to hello_world topic every second.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"386ce09a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"\\n\",\n       \"@app.run_in_background()\\n\",\n       \"async def hello_every_second():\\n\",\n       \"    while(True):\\n\",\n       \"        await to_hello_world(msg=\\\"Hello world!\\\")\\n\",\n       \"        await asyncio.sleep(1)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"define_run = \\\"\\\"\\\"\\n\",\n    \"import asyncio\\n\",\n    \"\\n\",\n    \"@app.run_in_background()\\n\",\n    \"async def hello_every_second():\\n\",\n    \"    while(True):\\n\",\n    \"        await 
to_hello_world(msg=\\\"Hello world!\\\")\\n\",\n    \"        await asyncio.sleep(1)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{define_run}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b68c95f7\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Final app\\n\",\n    \"\\n\",\n    \"Your app code should look like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cd7d88d3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msg: str) -> HelloWorld:\\n\",\n       \"    return HelloWorld(msg=msg)\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"\\n\",\n       \"@app.run_in_background()\\n\",\n       \"async def hello_every_second():\\n\",\n       \"    while(True):\\n\",\n       \"        await to_hello_world(msg=\\\"Hello world!\\\")\\n\",\n       \"        await asyncio.sleep(1)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       
\"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"produces_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_produces\\n\",\n    \"    + define_run\\n\",\n    \")\\n\",\n    \"md(f\\\"```python\\\\n{produces_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8fa74215\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Run the app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"afe529ab\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n\",\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"script_file = \\\"producer_example.py\\\"\\n\",\n    \"cmd = \\\"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\\\"\\n\",\n    \"md(\\n\",\n    \"    f\\\"Now we can run the app. 
Copy the code above in producer_example.py and run it by running\\\\n```shell\\\\n{cmd}\\\\n```\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e66be2b9\",\n   \"metadata\": {},\n   \"source\": [\n    \"After running the command, you should see this output in your terminal:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"aad42638\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def _run_example_app(\\n\",\n    \"    *, app_example: str, bootstrap_server: str, script_file: str, cmd: str\\n\",\n    \") -> Tuple[int, str]:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=app_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"    return exit_code, output.decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"335ec62d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: 
Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 84224...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 84224 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 83864...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 83864 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topicas=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=21092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    exit_code, output = await _run_example_app(\\n\",\n    \"        app_example=produces_example,\\n\",\n    \"        bootstrap_server=bootstrap_server,\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"    
)\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, exit_code\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"cf40711b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\\n\",\n      \"[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\\n\",\n      \"[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\\n\",\n      \"[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\\n\",\n      \"[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\\n\",\n      \"[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\\n\",\n      \"[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\\n\",\n      \"[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\\n\",\n      \"[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(output)\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"markdown\",\n   \"id\": \"d4ec6dab\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Check if the message was sent to the Kafka topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c81e65bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"Lets check the topic and see if there is a \\\"Hello world!\\\" message in the hello_world topic. In your terminal run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6ef181f6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"kafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_extension = \\\".bat\\\" if platform.system() == \\\"Windows\\\" else \\\".sh\\\"\\n\",\n    \"consumer_cmd = f\\\"kafka-console-consumer{script_extension} -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\\\"\\n\",\n    \"md(f\\\"```shell\\\\n{consumer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a66904c8\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n  
    \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"stdout=, stderr=, returncode=1\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup falied, generating a new port and retrying...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: port=34095\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 88797...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 88797 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 87215...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 87215 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 86502...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 86502 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"produces_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + 
import_pydantic\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_produces\\n\",\n    \"    + define_run\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=21092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    exit_code, output = await _run_example_app(\\n\",\n    \"        app_example=produces_example,\\n\",\n    \"        bootstrap_server=bootstrap_server,\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, exit_code\\n\",\n    \"\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        *consumer_cmd.replace(\\n\",\n    \"            \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"        ).split(\\\" \\\"),\\n\",\n    \"        pattern='{\\\"msg\\\":\\\"Hello world!\\\"}',\\n\",\n    \"        timeout=30,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await terminate_asyncio_process(proc)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9292901c\",\n   \"metadata\": {},\n   \"source\": [\n    \"You should see the {\\\"msg\\\": \\\"Hello world!\\\"} messages in your topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"49b9c46e\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Choosing a topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"ed9e5f0c\",\n   \"metadata\": {},\n   \"source\": [\n    \"You probably noticed that you didn't define which topic you are sending the message to, this is because the `@produces` decorator determines the topic by default from your function name.\\n\",\n    \"The decorator will take your function name and strip the default \\\"to_\\\" prefix from it and use the rest as the topic name. 
In this example case, the topic is *hello_world*.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4938a558\",\n   \"metadata\": {},\n   \"source\": [\n    \"!!! warn \\\\\\\"New topics\\\\\\\"\\n\",\n    \"\\n\",\n    \"    Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"dc3e9f8a\",\n   \"metadata\": {},\n   \"source\": [\n    \"You can choose your custom prefix by defining the `prefix` parameter in produces decorator, like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"da734ca4\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"@app.produces(prefix=\\\"send_to_\\\")\\n\",\n       \"async def send_to_hello_world(msg: str) -> HelloWorld:\\n\",\n       \"    return HelloWorld(msg=msg)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: False\\n\",\n    \"decorate_produces_prefix = \\\"\\\"\\\"\\n\",\n    \"@app.produces(prefix=\\\"send_to_\\\")\\n\",\n    \"async def send_to_hello_world(msg: str) -> HelloWorld:\\n\",\n    \"    return HelloWorld(msg=msg)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{decorate_produces_prefix}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"b7563b45\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n     
 \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 90304...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 90304 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 89536...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 89536 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 89174...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 89174 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"produces_example = (\\n\",\n    
\"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_produces_prefix\\n\",\n    \"    + define_run.replace(\\\"to_hello_world\\\", \\\"send_to_hello_world\\\")\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=21092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    exit_code, output = await _run_example_app(\\n\",\n    \"        app_example=produces_example,\\n\",\n    \"        bootstrap_server=bootstrap_server,\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, exit_code\\n\",\n    \"\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        *consumer_cmd.replace(\\n\",\n    \"            \\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"        ).split(\\\" \\\"),\\n\",\n    \"        pattern='{\\\"msg\\\":\\\"Hello world!\\\"}',\\n\",\n    \"        timeout=30,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await terminate_asyncio_process(proc)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0c4225dd\",\n   \"metadata\": {},\n   \"source\": [\n    \"Also, you can define the topic name completely by defining the `topic` in parameter in produces decorator, like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9bf881b6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"@app.produces(topic=\\\"my_special_topic\\\")\\n\",\n       \"async def to_hello_world(msg: str) -> HelloWorld:\\n\",\n       \"    return HelloWorld(msg=msg)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n     
  \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: False\\n\",\n    \"decorate_produces_topic = \\\"\\\"\\\"\\n\",\n    \"@app.produces(topic=\\\"my_special_topic\\\")\\n\",\n    \"async def to_hello_world(msg: str) -> HelloWorld:\\n\",\n    \"    return HelloWorld(msg=msg)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"md(f\\\"```python\\\\n{decorate_produces_topic}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"612baa8c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 91793...\\n\",\n      \"[INFO] 
fastkafka._components._subprocess: terminate_asyncio_process(): Process 91793 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 91026...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 91026 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 90665...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 90665 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"produces_example = (\\n\",\n    \"    import_fastkafka\\n\",\n    \"    + import_pydantic\\n\",\n    \"    + define_HelloWorld\\n\",\n    \"    + create_app\\n\",\n    \"    + decorate_produces_topic\\n\",\n    \"    + define_run\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"my_special_topic\\\"], apply_nest_asyncio=True, listener_port=21092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    exit_code, output = await _run_example_app(\\n\",\n    \"        app_example=produces_example,\\n\",\n    \"        bootstrap_server=bootstrap_server,\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, exit_code\\n\",\n    \"\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        *consumer_cmd.replace(\\\"<addr_of_your_kafka_bootstrap_server>\\\", bootstrap_server)\\n\",\n    \"        .replace(\\\"hello_world\\\", \\\"my_special_topic\\\")\\n\",\n    \"        .split(\\\" \\\"),\\n\",\n    \"        pattern='{\\\"msg\\\":\\\"Hello 
world!\\\"}',\\n\",\n    \"        timeout=30,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await terminate_asyncio_process(proc)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"da50972c\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Message data\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"7e4e8052\",\n   \"metadata\": {},\n   \"source\": [\n    \"The return value from your function will be translated JSON string and then to bytes and sent to defined Kafka topic. The typing of the return value is used for generating the documentation for your Kafka app.\\n\",\n    \"\\n\",\n    \"In this example case, the return value is HelloWorld class which will be translated into JSON formatted string and then to bytes. The translated data will then be sent to Kafka. In the from of: `b'{\\\"msg\\\":\\\"Hello world!\\\"}'`\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_22_Partition_Keys.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8e1ec46b\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Defining a partition key\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dcfccd0f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import platform\\n\",\n    \"\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import run_and_match\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"61526c5c\",\n   \"metadata\": {},\n   \"source\": [\n    \"Partition keys are used in Apache Kafka to determine which partition a message should be written to. This ensures that related messages are kept together in the same partition, which can be useful for ensuring order or for grouping related messages together for efficient processing. Additionally, partitioning data across multiple partitions allows Kafka to distribute load across multiple brokers and scale horizontally, while replicating data across multiple brokers provides fault tolerance.\\n\",\n    \"\\n\",\n    \"You can define your partition keys when using the `@produces` decorator, this guide will demonstrate to you this feature.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3237efbe\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Return a key from the producing function\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d0af82f7\",\n   \"metadata\": {},\n   \"source\": [\n    \"To define a key for the message that you want to produce to Kafka topic, you need to wrap the response into `KafkaEvent` class and set the key value. 
Check the example below:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"109cf37d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"from fastkafka import KafkaEvent\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\\n\",\n       \"    return KafkaEvent(HelloWorld(msg=msg), key=b\\\"my_key\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"hello_world_with_key = \\\"\\\"\\\"\\n\",\n    \"from fastkafka import KafkaEvent\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\\n\",\n    \"    return KafkaEvent(HelloWorld(msg=msg), key=b\\\"my_key\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{hello_world_with_key}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"034df146\",\n   \"metadata\": {},\n   \"source\": [\n    \"In the example, we want to return the `HelloWorld` message class with the key defined as *my_key*. So, we wrap the message and key into a KafkaEvent class and return it as such.\\n\",\n    \"\\n\",\n    \"While generating the documentation, the `KafkaEvent` class will be unwrapped and the `HelloWorld` class will be documented in the definition of message type, same way if you didn't use the key.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d07451d1\",\n   \"metadata\": {},\n   \"source\": [\n    \"!!! 
info \\\\\\\"Which key to choose?\\\\\\\"\\n\",\n    \"\\n\",\n    \"    Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"84d30a25\",\n   \"metadata\": {},\n   \"source\": [\n    \"## App example\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d3e2c6ad\",\n   \"metadata\": {},\n   \"source\": [\n    \"We will modify the app example from **@producer basics** guide to return the `HelloWorld` with our key. The final app will look like this (make sure you replace the `<url_of_your_kafka_bootstrap_server>` and `<port_of_your_kafka_bootstrap_server>` with the actual values):\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"673c7f8a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"app = \\\"\\\"\\\"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"class HelloWorld(BaseModel):\\n\",\n    \"    msg: str = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"Hello\\\",\\n\",\n    \"        description=\\\"Demo hello world message\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"demo_broker\\\": {\\n\",\n    \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n    \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n    \"\\\"\\\"\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": 
\"11fba86b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"bg_run = \\\"\\\"\\\"\\n\",\n    \"import asyncio\\n\",\n    \"\\n\",\n    \"@app.run_in_background()\\n\",\n    \"async def hello_every_second():\\n\",\n    \"    while(True):\\n\",\n    \"        await to_hello_world(msg=\\\"Hello world!\\\")\\n\",\n    \"        await asyncio.sleep(1)\\n\",\n    \"\\\"\\\"\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2abb4c3d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"from fastkafka import KafkaEvent\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\\n\",\n       \"    return KafkaEvent(HelloWorld(msg=msg), key=b\\\"my_key\\\")\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"\\n\",\n       \"@app.run_in_background()\\n\",\n       \"async def hello_every_second():\\n\",\n       \"    while(True):\\n\",\n       \"        await 
to_hello_world(msg=\\\"Hello world!\\\")\\n\",\n       \"        await asyncio.sleep(1)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"key_example = app + hello_world_with_key + bg_run\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{key_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"afd85e8b\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Run the app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ce98e25b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n\",\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_file = \\\"producer_with_key_example.py\\\"\\n\",\n    \"cmd = \\\"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\\\"\\n\",\n    \"md(\\n\",\n    \"    f\\\"Now we can run the app. 
Copy the code above in producer_example.py and run it by running\\\\n```shell\\\\n{cmd}\\\\n```\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"cedba893\",\n   \"metadata\": {},\n   \"source\": [\n    \"After running the command, you should see this output in your terminal:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0049ee5c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 347072...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 347072 terminated.\\n\",\n      \"[INFO] 
fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 346711...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 346711 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=22092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=key_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, output.decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"55632ae6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\\n\",\n      \"[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\\n\",\n      \"[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\\n\",\n      \"[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting 
background task 'hello_every_second'\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\\n\",\n      \"[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\\n\",\n      \"[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\\n\",\n      \"[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d4ec6dab\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Check if the message was sent to the Kafka topic with the desired key\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c81e65bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"Let's check the topic and see if there is a \\\"Hello world!\\\" message in the hello_world topic with the defined key. 
In your terminal run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6ef181f6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_extension = \\\".bat\\\" if platform.system() == \\\"Windows\\\" else \\\".sh\\\"\\n\",\n    \"consumer_cmd = f\\\"kafka-console-consumer{script_extension} --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\\\"\\n\",\n    \"md(f\\\"```shell\\\\n{consumer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"476e0e5d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] 
fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 348982...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 348982 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 348216...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 348216 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 347855...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 347855 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"expected_msg = 'my_key\\t{\\\"msg\\\":\\\"Hello world!\\\"}'\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=22092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=key_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        
).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, output.decode(\\\"UTF-8\\\")\\n\",\n    \"\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        *consumer_cmd.replace(\\n\",\n    \"            \\\"<address_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"        ).split(\\\" \\\"),\\n\",\n    \"        pattern=expected_msg,\\n\",\n    \"        timeout=30,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await terminate_asyncio_process(proc)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e523a7d3\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"You should see the *my_key\\t{\\\"msg\\\": \\\"Hello world!\\\"}* messages in your topic appearing, the *my_key* part of the message is the key that we defined in our producing function.\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"md(\\n\",\n    \"    f\\\"You should see the *{expected_msg}* messages in your topic appearing, the *my_key* part of the message is the key that we defined in our producing function.\\\"\\n\",\n    \")\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_23_Batch_Producing.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8e1ec46b\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Batch producing\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dcfccd0f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import platform\\n\",\n    \"\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from fastkafka._components._subprocess import terminate_asyncio_process\\n\",\n    \"from fastkafka._testing.apache_kafka_broker import run_and_match\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"61526c5c\",\n   \"metadata\": {},\n   \"source\": [\n    \"If you want to send your data in batches `@produces` decorator makes that possible for you. By returning a `list` of messages you want to send in a batch the producer will collect the messages and send them in a batch to a Kafka broker.\\n\",\n    \"\\n\",\n    \"This guide will demonstrate how to use this feature.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3237efbe\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Return a batch from the producing function\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d0af82f7\",\n   \"metadata\": {},\n   \"source\": [\n    \"To define a batch that you want to produce to Kafka topic, you need to return the `List` of the messages that you want to be batched from your producing function.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"109cf37d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"from typing import List\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msgs: 
List[str]) -> List[HelloWorld]:\\n\",\n       \"    return [HelloWorld(msg=msg) for msg in msgs]\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"hello_world_batch = \\\"\\\"\\\"\\n\",\n    \"from typing import List\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\\n\",\n    \"    return [HelloWorld(msg=msg) for msg in msgs]\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{hello_world_batch}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"034df146\",\n   \"metadata\": {},\n   \"source\": [\n    \"In the example, we want to return the `HelloWorld` message class batch that is created from a list of msgs we passed into our producing function.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"86b104f9\",\n   \"metadata\": {},\n   \"source\": [\n    \"Let's also prepare a background task that will send a batch of \\\"hello world\\\" messages when the app starts.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"11fba86b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"@app.run_in_background()\\n\",\n       \"async def prepare_and_send_hello_batch():\\n\",\n       \"    msgs=[f\\\"Hello world {i}\\\" for i in range(10)]\\n\",\n       \"    await to_hello_world(msgs)\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   
\"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"bg_run = \\\"\\\"\\\"\\n\",\n    \"@app.run_in_background()\\n\",\n    \"async def prepare_and_send_hello_batch():\\n\",\n    \"    msgs=[f\\\"Hello world {i}\\\" for i in range(10)]\\n\",\n    \"    await to_hello_world(msgs)\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{bg_run}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"84d30a25\",\n   \"metadata\": {},\n   \"source\": [\n    \"## App example\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d3e2c6ad\",\n   \"metadata\": {},\n   \"source\": [\n    \"We will modify the app example from [@producer basics](/docs/guides/Guide_21_Produces_Basics.md) guide to return the `HelloWorld` batch. The final app will look like this (make sure you replace the `<url_of_your_kafka_bootstrap_server>` and `<port_of_your_kafka_bootstrap_server>` with the actual values):\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"673c7f8a\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"app = \\\"\\\"\\\"\\n\",\n    \"import asyncio\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"class HelloWorld(BaseModel):\\n\",\n    \"    msg: str = Field(\\n\",\n    \"        ...,\\n\",\n    \"        example=\\\"Hello\\\",\\n\",\n    \"        description=\\\"Demo hello world message\\\",\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"demo_broker\\\": {\\n\",\n    \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n    \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n    \"    }\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n    
\"\\\"\\\"\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"2abb4c3d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"@app.run_in_background()\\n\",\n       \"async def prepare_and_send_hello_batch():\\n\",\n       \"    msgs=[f\\\"Hello world {i}\\\" for i in range(10)]\\n\",\n       \"    await to_hello_world(msgs)\\n\",\n       \"\\n\",\n       \"from typing import List\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\\n\",\n       \"    return [HelloWorld(msg=msg) for msg in msgs]\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"batch_example = app + bg_run + hello_world_batch\\n\",\n    \"\\n\",\n    
\"md(f\\\"```python\\\\n{batch_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"afd85e8b\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Run the app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ce98e25b\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n\",\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_file = \\\"producer_with_key_example.py\\\"\\n\",\n    \"cmd = \\\"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\\\"\\n\",\n    \"md(\\n\",\n    \"    f\\\"Now we can run the app. 
Copy the code above in producer_example.py and run it by running\\\\n```shell\\\\n{cmd}\\\\n```\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"cedba893\",\n   \"metadata\": {},\n   \"source\": [\n    \"After running the command, you should see this output in your terminal:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0049ee5c\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: 
terminate_asyncio_process(): Terminating the process 45714...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 45714 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 45353...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 45353 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=23092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=batch_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, f'{exit_code=}, {output.decode(\\\"UTF-8\\\")}'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"55632ae6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\\n\",\n      \"[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\\n\",\n      \"[46480]: 
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\\n\",\n      \"[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\\n\",\n      \"[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\\n\",\n      \"[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\\n\",\n      \"[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\\n\",\n      \"[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d4ec6dab\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Check if the batch was sent to the Kafka topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c81e65bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"Let's check the topic and see if there are \\\"Hello world\\\" messages in the hello_world topic. 
In your terminal run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"6ef181f6\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_extension = \\\".bat\\\" if platform.system() == \\\"Windows\\\" else \\\".sh\\\"\\n\",\n    \"consumer_cmd = f\\\"kafka-console-consumer{script_extension} --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\\\"\\n\",\n    \"md(f\\\"```shell\\\\n{consumer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"476e0e5d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] 
fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 47627...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 47627 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 46861...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 46861 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 46500...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 46500 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"expected_msg = '{\\\"msg\\\":\\\"Hello world *[0-9]\\\"}'\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=23092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=batch_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        
script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, f'{exit_code=}, {output.decode(\\\"UTF-8\\\")}'\\n\",\n    \"\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        *consumer_cmd.replace(\\n\",\n    \"            \\\"<address_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"        ).split(\\\" \\\"),\\n\",\n    \"        pattern=expected_msg,\\n\",\n    \"        timeout=30,\\n\",\n    \"        num_to_match=10\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await terminate_asyncio_process(proc)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"51f90b8b\",\n   \"metadata\": {},\n   \"source\": [\n    \"You should see the batch of messages in your topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"6ecc09af\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Batch key\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1266f878\",\n   \"metadata\": {},\n   \"source\": [\n    \"To define a key for your batch like in [Defining a partition key](/docs/guides/Guide_22_Partition_Keys.md) guide you can wrap the returning value in a `KafkaEvent` class. 
To learn more about defining a partition key and `KafkaEvent` class, please, have a look at [Defining a partition key](/docs/guides/Guide_22_Partition_Keys.md) guide.\\n\",\n    \"\\n\",\n    \"Let's demonstrate that.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e6859c55\",\n   \"metadata\": {},\n   \"source\": [\n    \"To define a key, we just need to modify our producing function, like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e0336bb1\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"from typing import List\\n\",\n       \"from fastkafka import KafkaEvent\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\\n\",\n       \"    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\\\"my_key\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"hello_world_batch_key = \\\"\\\"\\\"\\n\",\n    \"from typing import List\\n\",\n    \"from fastkafka import KafkaEvent\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\\n\",\n    \"    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\\\"my_key\\\")\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"md(f\\\"```python\\\\n{hello_world_batch_key}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2f7e890c\",\n   \"metadata\": {},\n   \"source\": [\n    \"Now our app looks like this:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   
\"id\": \"463db2f9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```python\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"class HelloWorld(BaseModel):\\n\",\n       \"    msg: str = Field(\\n\",\n       \"        ...,\\n\",\n       \"        example=\\\"Hello\\\",\\n\",\n       \"        description=\\\"Demo hello world message\\\",\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"demo_broker\\\": {\\n\",\n       \"        \\\"url\\\": \\\"<url_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"        \\\"description\\\": \\\"local demo kafka broker\\\",\\n\",\n       \"        \\\"port\\\": \\\"<port_of_your_kafka_bootstrap_server>\\\",\\n\",\n       \"    }\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n       \"\\n\",\n       \"@app.run_in_background()\\n\",\n       \"async def prepare_and_send_hello_batch():\\n\",\n       \"    msgs=[f\\\"Hello world {i}\\\" for i in range(10)]\\n\",\n       \"    await to_hello_world(msgs)\\n\",\n       \"\\n\",\n       \"from typing import List\\n\",\n       \"from fastkafka import KafkaEvent\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\\n\",\n       \"    return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b\\\"my_key\\\")\\n\",\n       \"\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"batch_key_example = app + bg_run + hello_world_batch_key\\n\",\n    \"\\n\",\n    
\"md(f\\\"```python\\\\n{batch_key_example}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"6f0832bc\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Check if the batch was sent to the Kafka topic\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"6ce14634\",\n   \"metadata\": {},\n   \"source\": [\n    \"Lets check the topic and see if there are \\\"Hello world\\\" messages in the hello_world topic, containing a defined key. In your terminal run:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3934d700\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"```shell\\n\",\n       \"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_extension = \\\".bat\\\" if platform.system() == \\\"Windows\\\" else \\\".sh\\\"\\n\",\n    \"consumer_cmd = f\\\"kafka-console-consumer{script_extension} --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\\\"\\n\",\n    \"md(f\\\"```shell\\\\n{consumer_cmd}\\\\n```\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"31ee42ec\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop 
running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 49116...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 49116 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 48349...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 48349 terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 47988...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 47988 terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"expected_msg = 'my_key\\t{\\\"msg\\\":\\\"Hello world *[0-9]\\\"}'\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    
topics=[\\\"hello_world\\\"], apply_nest_asyncio=True, listener_port=23092\\n\",\n    \") as bootstrap_server:\\n\",\n    \"    server_url = bootstrap_server.split(\\\":\\\")[0]\\n\",\n    \"    server_port = bootstrap_server.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=batch_key_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server>\\\", server_url\\n\",\n    \"        ).replace(\\\"<port_of_your_kafka_bootstrap_server>\\\", server_port),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, f'{exit_code=}, {output.decode(\\\"UTF-8\\\")}'\\n\",\n    \"\\n\",\n    \"    proc = await run_and_match(\\n\",\n    \"        *consumer_cmd.replace(\\n\",\n    \"            \\\"<address_of_your_kafka_bootstrap_server>\\\", bootstrap_server\\n\",\n    \"        ).split(\\\" \\\"),\\n\",\n    \"        pattern=expected_msg,\\n\",\n    \"        timeout=30,\\n\",\n    \"        num_to_match=1\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    await terminate_asyncio_process(proc)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4ae5ba3c\",\n   \"metadata\": {},\n   \"source\": [\n    \"You should see the batch of messages with the defined key in your topic.\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_24_Using_Multiple_Kafka_Clusters.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8e1ec46b\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Using multiple Kafka clusters\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dcfccd0f\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import platform\\n\",\n    \"import pytest\\n\",\n    \"from IPython.display import Markdown as md\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from fastkafka.testing import Tester, ApacheKafkaBroker, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"61526c5c\",\n   \"metadata\": {},\n   \"source\": [\n    \"Ready to take your FastKafka app to the next level? This guide shows you how to connect to multiple Kafka clusters effortlessly. Consolidate topics and produce messages across clusters like a pro. \\n\",\n    \"Unleash the full potential of your Kafka-powered app with FastKafka. Let's dive in and elevate your application's capabilities!\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"099c41ef\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Test message\\n\",\n    \"\\n\",\n    \"To showcase the functionalities of FastKafka and illustrate the concepts discussed, we can use a simple test message called `TestMsg`. 
Here's the definition of the `TestMsg` class:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c4828bd1\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"79d89a52\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Defining multiple broker configurations\\n\",\n    \"\\n\",\n    \"When building a FastKafka application, you may need to consume messages from multiple Kafka clusters, each with its own set of broker configurations. FastKafka provides the flexibility to define different broker clusters using the brokers argument in the consumes decorator. Let's explore an example code snippet\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"53fb0f9b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"kafka_brokers_1 = dict(\\n\",\n    \"    development=dict(url=\\\"dev.server_1\\\", port=9092),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"kafka_brokers_2 = dict(\\n\",\n    \"    development=dict(url=\\\"dev.server_2\\\", port=9092),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1, bootstrap_servers_id=\\\"development\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_preprocessed_signals_1(msg: TestMsg):\\n\",\n    \"    print(f\\\"Received on s1: {msg=}\\\")\\n\",\n    \"    await to_predictions_1(msg)\\n\",\n    \"\\n\",\n    \"\\n\",\n    
\"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def on_preprocessed_signals_2(msg: TestMsg):\\n\",\n    \"    print(f\\\"Received on s2: {msg=}\\\")\\n\",\n    \"    await to_predictions_2(msg)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions_1(msg: TestMsg) -> TestMsg:\\n\",\n    \"    return msg\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def to_predictions_2(msg: TestMsg) -> TestMsg:\\n\",\n    \"    return msg\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a8fed8cc\",\n   \"metadata\": {},\n   \"source\": [\n    \"In this example, the application has two consumes endpoints, both of which will consume events from `preprocessed_signals` topic. `on_preprocessed_signals_1` will consume events from `kafka_brokers_1` configuration and `on_preprocessed_signals_2` will consume events from `kafka_brokers_2` configuration.\\n\",\n    \"When producing, `to_predictions_1` will produce to `predictions` topic on `kafka_brokers_1` cluster and `to_predictions_2` will produce to `predictions` topic on `kafka_brokers_2` cluster.\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"#### How it works\\n\",\n    \"\\n\",\n    \"The `kafka_brokers_1` configuration represents the primary cluster, while `kafka_brokers_2` serves as an alternative cluster specified in the decorator.\\n\",\n    \"\\n\",\n    \"Using the FastKafka class, the app object is initialized with the primary broker configuration (`kafka_brokers_1`). By default, the `@app.consumes` decorator without the brokers argument consumes messages from the `preprocessed_signals` topic on `kafka_brokers_1`.\\n\",\n    \"\\n\",\n    \"To consume messages from a different cluster, the `@app.consumes` decorator includes the `brokers` argument. 
This allows explicit specification of the broker cluster in the `on_preprocessed_signals_2` function, enabling consumption from the same topic but using the `kafka_brokers_2` configuration.\\n\",\n    \"\\n\",\n    \"The brokers argument can also be used in the @app.produces decorator to define multiple broker clusters for message production.\\n\",\n    \"\\n\",\n    \"It's important to ensure that all broker configurations have the same required settings as the primary cluster to ensure consistent behavior.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3c8d8b00\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Testing the application\\n\",\n    \"\\n\",\n    \"To test our FastKafka 'mirroring' application, we can use our testing framework. Lets take a look how it's done:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"843030d2\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-23 12:15:51.156 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-23 12:15:51.157 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-06-23 12:15:51.157 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\\n\",\n      \"23-06-23 12:15:51.158 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:15:51.158 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\\n\",\n      \"23-06-23 12:15:51.159 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:15:51.178 [INFO] fastkafka._application.app: _create_producer() : created 
producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\\n\",\n      \"23-06-23 12:15:51.178 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:15:51.179 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\\n\",\n      \"23-06-23 12:15:51.180 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\\n\",\n      \"23-06-23 12:15:51.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:15:51.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"23-06-23 12:15:51.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:15:51.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:15:51.187 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\\n\",\n      \"23-06-23 12:15:51.187 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:15:51.188 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:15:51.188 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:15:51.189 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\\n\",\n      \"23-06-23 12:15:51.190 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-06-23 12:15:51.191 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 
'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\\n\",\n      \"23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:15:51.193 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:15:51.194 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-06-23 12:15:51.194 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Received on s1: msg=TestMsg(msg='signal_s1')\\n\",\n      \"Received on s2: msg=TestMsg(msg='signal_s2')\\n\",\n      \"23-06-23 12:15:56.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:15:56.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:15:56.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:15:56.183 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:15:56.184 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      
\"23-06-23 12:15:56.184 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:15:56.185 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:15:56.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:15:56.188 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\\n\",\n    \"    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\\\"signal_s1\\\"))\\n\",\n    \"    # Assert on_preprocessed_signals_1 consumed sent message\\n\",\n    \"    await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\\n\",\n    \"        TestMsg(msg=\\\"signal_s1\\\"), timeout=5\\n\",\n    \"    )\\n\",\n    \"    # Assert app has produced a prediction\\n\",\n    \"    await tester.mirrors[app.to_predictions_1].assert_called_with(\\n\",\n    \"        TestMsg(msg=\\\"signal_s1\\\"), timeout=5\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\\n\",\n    \"    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\\\"signal_s2\\\"))\\n\",\n  
  \"    # Assert on_preprocessed_signals_2 consumed sent message\\n\",\n    \"    await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\\n\",\n    \"        TestMsg(msg=\\\"signal_s2\\\"), timeout=5\\n\",\n    \"    )\\n\",\n    \"    # Assert app has produced a prediction\\n\",\n    \"    await tester.mirrors[app.to_predictions_2].assert_called_with(\\n\",\n    \"        TestMsg(msg=\\\"signal_s2\\\"), timeout=5\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2ec0bd7f\",\n   \"metadata\": {},\n   \"source\": [\n    \"The usage of the `tester.mirrors` dictionary allows specifying the desired topic/broker combination for sending the test messages, especially when working with multiple Kafka clusters. \\n\",\n    \"This ensures that the data is sent to the appropriate topic/broker based on the consuming function, and consumed from appropriate topic/broker based on the producing function.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"e397ac22\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Running the application\\n\",\n    \"\\n\",\n    \"You can run your application using `fastkafka run` CLI command in the same way that you would run a single cluster app.\\n\",\n    \"\\n\",\n    \"To start your app, copy the code above in multi_cluster_example.py and run it by running:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"ca3c1c72\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"Now we can run the app. 
Copy the code above in multi_cluster_example.py, adjust your server configurations, and run it by running\\n\",\n       \"```shell\\n\",\n       \"fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\\n\",\n       \"```\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"script_file = \\\"multi_cluster_example.py\\\"\\n\",\n    \"filename = script_file.split(\\\".py\\\")[0]\\n\",\n    \"cmd = f\\\"fastkafka run --num-workers=1 --kafka-broker=development {filename}:app\\\"\\n\",\n    \"md(\\n\",\n    \"    f\\\"Now we can run the app. Copy the code above in {script_file}, adjust your server configurations, and run it by running\\\\n```shell\\\\n{cmd}\\\\n```\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"e34daab3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"multi_cluster_example = \\\"\\\"\\\"\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"kafka_brokers_1 = dict(\\n\",\n    \"    development=dict(url=\\\"<url_of_your_kafka_bootstrap_server_1>\\\", port=<port_of_your_kafka_bootstrap_server_1>),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"kafka_brokers_2 = dict(\\n\",\n    \"    development=dict(url=\\\"<url_of_your_kafka_bootstrap_server_2>\\\", port=<port_of_your_kafka_bootstrap_server_2>),\\n\",\n    \"    production=dict(url=\\\"prod.server_1\\\", port=9092),\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"app = 
FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_preprocessed_signals_1(msg: TestMsg):\\n\",\n    \"    print(f\\\"Received on s1: {msg=}\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def on_preprocessed_signals_2(msg: TestMsg):\\n\",\n    \"    print(f\\\"Received on s2: {msg=}\\\")\\n\",\n    \"\\\"\\\"\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9192d56a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-23 12:16:04.473 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-23 12:16:04.475 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-23 12:16:04.475 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-23 12:16:04.476 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-23 12:16:04.477 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-23 12:16:04.706 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-23 12:16:04.707 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-06-23 12:16:04.708 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-06-23 12:16:05.426 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-23 12:16:07.330 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running 
on 127.0.0.1:24092\\n\",\n      \"23-06-23 12:16:08.909 [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:24092\\n\",\n      \"23-06-23 12:16:08.910 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-23 12:16:08.910 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"23-06-23 12:16:08.911 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"23-06-23 12:16:08.911 [WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"23-06-23 12:16:08.912 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-06-23 12:16:09.025 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-06-23 12:16:09.026 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"Port 2181 is already in use\\n\",\n      \"23-06-23 12:16:09.027 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper startup failed, generating a new port and retrying...\\n\",\n      \"23-06-23 12:16:09.027 [INFO] fastkafka._testing.apache_kafka_broker: zookeeper new port=42347\\n\",\n      \"23-06-23 12:16:09.723 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-06-23 12:16:11.649 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:24093\\n\",\n      \"23-06-23 12:16:13.244 [INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:24093\\n\",\n      \"23-06-23 12:16:13.245 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\",\n      \"23-06-23 12:16:19.562 [INFO] fastkafka._testing.apache_kafka_broker: 
ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-23 12:16:19.563 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 181770...\\n\",\n      \"23-06-23 12:16:21.150 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 181770 terminated.\\n\",\n      \"23-06-23 12:16:21.151 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 181382...\\n\",\n      \"23-06-23 12:16:22.486 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 181382 terminated.\\n\",\n      \"23-06-23 12:16:22.488 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\",\n      \"23-06-23 12:16:22.489 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"23-06-23 12:16:22.489 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 180408...\\n\",\n      \"23-06-23 12:16:24.073 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 180408 terminated.\\n\",\n      \"23-06-23 12:16:24.073 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 180019...\\n\",\n      \"23-06-23 12:16:25.408 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 180019 terminated.\\n\",\n      \"23-06-23 12:16:25.410 [INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"with ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"preprocessed_signals\\\"], apply_nest_asyncio=True, listener_port=24092\\n\",\n    \") as bootstrap_server_1, ApacheKafkaBroker(\\n\",\n    \"    topics=[\\\"preprocessed_signals\\\"], apply_nest_asyncio=True, listener_port=24093\\n\",\n    \") as bootstrap_server_2:\\n\",\n    \"    server_url_1 = bootstrap_server_1.split(\\\":\\\")[0]\\n\",\n    \"   
 server_port_1 = bootstrap_server_1.split(\\\":\\\")[1]\\n\",\n    \"    server_url_2 = bootstrap_server_2.split(\\\":\\\")[0]\\n\",\n    \"    server_port_2 = bootstrap_server_2.split(\\\":\\\")[1]\\n\",\n    \"    exit_code, output = await run_script_and_cancel(\\n\",\n    \"        script=multi_cluster_example.replace(\\n\",\n    \"            \\\"<url_of_your_kafka_bootstrap_server_1>\\\", server_url_1\\n\",\n    \"        )\\n\",\n    \"        .replace(\\\"<port_of_your_kafka_bootstrap_server_1>\\\", server_port_1)\\n\",\n    \"        .replace(\\\"<url_of_your_kafka_bootstrap_server_2>\\\", server_url_2)\\n\",\n    \"        .replace(\\\"<port_of_your_kafka_bootstrap_server_2>\\\", server_port_2),\\n\",\n    \"        script_file=script_file,\\n\",\n    \"        cmd=cmd,\\n\",\n    \"        cancel_after=5,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    expected_returncode = [0, 1]\\n\",\n    \"    assert exit_code in expected_returncode, output.decode(\\\"UTF-8\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2b6055d0\",\n   \"metadata\": {},\n   \"source\": [\n    \"In your app logs, you should see your app starting up and your two consumer functions connecting to different kafka clusters.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dacc3dd5\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24092'}\\n\",\n      \"[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() starting...\\n\",\n      \"[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24093'}\\n\",\n      \"[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\\n\",\n      \"[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\\n\",\n      \"[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\\n\",\n      \"[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\\n\",\n      \"[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \\n\",\n      \"[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. 
\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"23-06-23 12:16:18.294 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 182747...\\n\",\n      \"[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:19.471 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 182747 terminated.\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"print(output.decode(\\\"UTF-8\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"770d36aa\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Application documentation\\n\",\n    \"\\n\",\n    \"At the moment the documentation for multicluster app is not yet implemented, but it is under development and you can expecti it soon!\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"fd591d33\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Examples on how to use multiple broker configurations\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1825a024\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Example #1\\n\",\n    \"\\n\",\n    \"In this section, we'll explore how you can effectively forward topics between different Kafka clusters, enabling seamless data synchronization for your applications.\\n\",\n    \"\\n\",\n    \"Imagine having two Kafka clusters, namely `kafka_brokers_1` and `kafka_brokers_2`, each 
hosting its own set of topics and messages. Now, if you want to forward a specific topic (in this case: `preprocessed_signals`) from kafka_brokers_1 to kafka_brokers_2, FastKafka provides an elegant solution.\\n\",\n    \"\\n\",\n    \"Let's examine the code snippet that configures our application for topic forwarding:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f8b84d48\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"kafka_brokers_1 = dict(localhost=dict(url=\\\"server_1\\\", port=9092))\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"server_2\\\", port=9092))\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_preprocessed_signals_original(msg: TestMsg):\\n\",\n    \"    await to_preprocessed_signals_forward(msg)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\\n\",\n    \"    return data\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"18ceaf23\",\n   \"metadata\": {},\n   \"source\": [\n    \"Here's how it works: our FastKafka application is configured to consume messages from `kafka_brokers_1` and process them in the `on_preprocessed_signals_original` function. We want to forward these messages to `kafka_brokers_2`. 
To achieve this, we define the `to_preprocessed_signals_forward` function as a producer, seamlessly producing the processed messages to the preprocessed_signals topic within the `kafka_brokers_2` cluster.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2e71340e\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Testing\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3adc619a\",\n   \"metadata\": {},\n   \"source\": [\n    \"To test our FastKafka forwarding application, we can use our testing framework. Let's take a look at the testing code snippet:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"5b6868e9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-23 12:16:31.689 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-23 12:16:31.690 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-06-23 12:16:31.691 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"23-06-23 12:16:31.691 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:31.701 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\\n\",\n      \"23-06-23 12:16:31.702 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:31.702 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:31.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 
'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\\n\",\n      \"23-06-23 12:16:31.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:31.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:31.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"23-06-23 12:16:31.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:31.708 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:31.708 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:31.709 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"23-06-23 12:16:31.709 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:16:35.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:16:35.703 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:35.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:35.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:16:35.705 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:16:35.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:35.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"819d38c7\",\n   \"metadata\": {},\n   \"source\": [\n    \"With the help of the **Tester** object, we can simulate and verify the behavior of our FastKafka application. Here's how it works:\\n\",\n    \"\\n\",\n    \"1. We create an instance of the **Tester** by passing in our *app* object, which represents our FastKafka application.\\n\",\n    \"\\n\",\n    \"2. Using the **tester.mirrors** dictionary, we can send a message to a specific Kafka broker and topic combination. 
In this case, we use `tester.mirrors[app.on_preprocessed_signals_original]` to send a TestMsg message with the content \\\"signal\\\" to the appropriate Kafka broker and topic.\\n\",\n    \"\\n\",\n    \"3. After sending the message, we can perform assertions on the mirrored function using `tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)`. This assertion ensures that the mirrored function has been called within a specified timeout period (in this case, 5 seconds).\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3237efbe\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Example #2\\n\",\n    \"\\n\",\n    \"In this section, we'll explore how you can effortlessly consume data from multiple sources, process it, and aggregate the results into a single topic on a specific cluster.\\n\",\n    \"\\n\",\n    \"Imagine you have two Kafka clusters: **kafka_brokers_1** and **kafka_brokers_2**, each hosting its own set of topics and messages. Now, what if you want to consume data from both clusters, perform some processing, and produce the results to a single topic on **kafka_brokers_1**? 
FastKafka has got you covered!\\n\",\n    \"\\n\",\n    \"Let's take a look at the code snippet that configures our application for aggregating multiple clusters:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a38fc478\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"kafka_brokers_1 = dict(localhost=dict(url=\\\"server_1\\\", port=9092))\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"server_2\\\", port=9092))\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_preprocessed_signals_1(msg: TestMsg):\\n\",\n    \"    print(f\\\"Default: {msg=}\\\")\\n\",\n    \"    await to_predictions(msg)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def on_preprocessed_signals_2(msg: TestMsg):\\n\",\n    \"    print(f\\\"Specified: {msg=}\\\")\\n\",\n    \"    await to_predictions(msg)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction: {prediction}\\\")\\n\",\n    \"    return [prediction]\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"95a1642f\",\n   \"metadata\": {},\n   \"source\": [\n    \"Here's the idea: our FastKafka application is set to consume messages from the topic \\\"preprocessed_signals\\\" on **kafka_brokers_1** cluster, as well as from the same topic on **kafka_brokers_2** cluster. 
We have two consuming functions, `on_preprocessed_signals_1` and `on_preprocessed_signals_2`, that handle the messages from their respective clusters. These functions perform any required processing, in this case, just calling the to_predictions function.\\n\",\n    \"\\n\",\n    \"The exciting part is that the to_predictions function acts as a producer, sending the processed results to the \\\"predictions\\\" topic on **kafka_brokers_1 cluster**. By doing so, we effectively aggregate the data from multiple sources into a single topic on a specific cluster.\\n\",\n    \"\\n\",\n    \"This approach enables you to consume data from multiple Kafka clusters, process it, and produce the aggregated results to a designated topic. Whether you're generating predictions, performing aggregations, or any other form of data processing, FastKafka empowers you to harness the full potential of multiple clusters.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d80755a2\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Testing\\n\",\n    \"\\n\",\n    \"Let's take a look at the testing code snippet:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"aadbdd9e\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-06-23 12:16:41.222 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-23 12:16:41.223 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-06-23 12:16:41.224 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\\n\",\n      \"23-06-23 12:16:41.224 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:41.239 [INFO] fastkafka._application.app: 
_create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\\n\",\n      \"23-06-23 12:16:41.239 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:41.240 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"23-06-23 12:16:41.240 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\\n\",\n      \"23-06-23 12:16:41.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:41.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"23-06-23 12:16:41.245 
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:41.248 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\\n\",\n      \"23-06-23 12:16:41.248 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"Default: msg=TestMsg(msg='signal')\\n\",\n      \"Sending prediction: msg='signal'\\n\",\n      \"Specified: msg=TestMsg(msg='signal')\\n\",\n      \"Sending prediction: msg='signal'\\n\",\n      \"23-06-23 12:16:45.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() 
called\\n\",\n      \"23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:45.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:16:45.244 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:45.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:45.246 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:45.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:16:45.247 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester.on_predictions.assert_called(timeout=5)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4ebba677\",\n   
\"metadata\": {},\n   \"source\": [\n    \"Here's how the code above works:\\n\",\n    \"\\n\",\n    \"1. Within an `async with` block, create an instance of the Tester by passing in your app object, representing your FastKafka application.\\n\",\n    \"\\n\",\n    \"2. Using the tester.mirrors dictionary, you can send messages to specific Kafka broker and topic combinations. In this case, we use `tester.mirrors[app.on_preprocessed_signals_1]` and `tester.mirrors[app.on_preprocessed_signals_2]` to send TestMsg messages with the content \\\"signal\\\" to the corresponding Kafka broker and topic combinations.\\n\",\n    \"\\n\",\n    \"3. After sending the messages, you can perform assertions on the **on_predictions** function using `tester.on_predictions.assert_called(timeout=5)`. This assertion ensures that the on_predictions function has been called within a specified timeout period (in this case, 5 seconds).\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"aaf82425\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Example #3\\n\",\n    \"\\n\",\n    \"In some scenarios, you may need to produce messages to multiple Kafka clusters simultaneously. FastKafka simplifies this process by allowing you to configure your application to produce messages to multiple clusters effortlessly. 
Let's explore how you can achieve this:\\n\",\n    \"\\n\",\n    \"Consider the following code snippet that demonstrates producing messages to multiple clusters:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0e48106b\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"kafka_brokers_1 = dict(localhost=dict(url=\\\"server_1\\\", port=9092))\\n\",\n    \"kafka_brokers_2 = dict(localhost=dict(url=\\\"server_2\\\", port=9092))\\n\",\n    \"\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers_1)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes(topic=\\\"preprocessed_signals\\\")\\n\",\n    \"async def on_preprocessed_signals(msg: TestMsg):\\n\",\n    \"    print(f\\\"{msg=}\\\")\\n\",\n    \"    await to_predictions_1(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"    await to_predictions_2(TestMsg(msg=\\\"prediction\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions_1(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction to s1: {prediction}\\\")\\n\",\n    \"    return [prediction]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces(topic=\\\"predictions\\\", brokers=kafka_brokers_2)\\n\",\n    \"async def to_predictions_2(prediction: TestMsg) -> TestMsg:\\n\",\n    \"    print(f\\\"Sending prediction to s2: {prediction}\\\")\\n\",\n    \"    return [prediction]\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"dc670be1\",\n   \"metadata\": {},\n   \"source\": [\n    \"Here's what you need to know about producing to multiple clusters:\\n\",\n    \"\\n\",\n    \"1. 
We define two Kafka broker configurations: **kafka_brokers_1** and **kafka_brokers_2**, representing different clusters with their respective connection details.\\n\",\n    \"\\n\",\n    \"2. We create an instance of the FastKafka application, specifying **kafka_brokers_1** as the primary cluster for producing messages.\\n\",\n    \"\\n\",\n    \"3. The `on_preprocessed_signals` function serves as a consumer, handling incoming messages from the \\\"preprocessed_signals\\\" topic. Within this function, we invoke two producer functions: `to_predictions_1` and `to_predictions_2`.\\n\",\n    \"\\n\",\n    \"4. The `to_predictions_1` function sends predictions to the \\\"predictions\\\" topic on *kafka_brokers_1* cluster.\\n\",\n    \"\\n\",\n    \"5. Additionally, the `to_predictions_2` function sends the same predictions to the \\\"predictions\\\" topic on *kafka_brokers_2* cluster. This allows for producing the same data to multiple clusters simultaneously.\\n\",\n    \"\\n\",\n    \"By utilizing this approach, you can seamlessly produce messages to multiple Kafka clusters, enabling you to distribute data across different environments or leverage the strengths of various clusters.\\n\",\n    \"\\n\",\n    \"Feel free to customize the producer functions as per your requirements, performing any necessary data transformations or enrichment before sending the predictions.\\n\",\n    \"\\n\",\n    \"With FastKafka, producing to multiple clusters becomes a breeze, empowering you to harness the capabilities of multiple environments effortlessly.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0c1caf66\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Testing\\n\",\n    \"\\n\",\n    \"Let's take a look at the testing code snippet:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"66fdc528\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     
\"text\": [\n      \"23-06-23 12:16:49.903 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-06-23 12:16:49.904 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-06-23 12:16:49.904 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\\n\",\n      \"23-06-23 12:16:49.905 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:49.905 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\\n\",\n      \"23-06-23 12:16:49.906 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:49.921 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\\n\",\n      \"23-06-23 12:16:49.921 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-06-23 12:16:49.921 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:49.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\\n\",\n      \"23-06-23 12:16:49.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:49.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:49.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:49.924 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\\n\",\n      \"23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:49.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\\n\",\n      \"23-06-23 12:16:49.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:49.926 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\\n\",\n      \"23-06-23 12:16:49.928 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-06-23 12:16:49.929 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\\n\",\n      \"23-06-23 12:16:49.929 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"msg=TestMsg(msg='signal')\\n\",\n      \"Sending prediction to s1: msg='prediction'\\n\",\n      \"Sending prediction to s2: msg='prediction'\\n\",\n      \"23-06-23 12:16:53.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:16:53.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:53.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:53.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:53.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:16:53.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer 
patched stop() called\\n\",\n      \"23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"async with Tester(app) as tester:\\n\",\n    \"    await tester.to_preprocessed_signals(TestMsg(msg=\\\"signal\\\"))\\n\",\n    \"    await tester.mirrors[to_predictions_1].assert_called(timeout=5)\\n\",\n    \"    await tester.mirrors[to_predictions_2].assert_called(timeout=5)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"99617426\",\n   \"metadata\": {},\n   \"source\": [\n    \"Here's how you can perform the necessary tests:\\n\",\n    \"\\n\",\n    \"1. Within an async with block, create an instance of the **Tester** by passing in your app object, representing your FastKafka application.\\n\",\n    \"\\n\",\n    \"2. Using the `tester.to_preprocessed_signals` method, you can send a TestMsg message with the content \\\"signal\\\".\\n\",\n    \"\\n\",\n    \"3. After sending the message, you can perform assertions on the to_predictions_1 and to_predictions_2 functions using `tester.mirrors[to_predictions_1].assert_called(timeout=5)` and `tester.mirrors[to_predictions_2].assert_called(timeout=5)`. These assertions ensure that the respective producer functions have produced data to their respective topic/broker combinations.\\n\",\n    \"\\n\",\n    \"By employing this testing approach, you can verify that the producing functions correctly send messages to their respective clusters. The testing framework provided by FastKafka enables you to ensure the accuracy and reliability of your application's producing logic.\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_30_Using_docker_to_deploy_fastkafka.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c8d4afc9\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Deploying FastKafka using Docker\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a7311d5f\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Building a Docker Image\\n\",\n    \"\\n\",\n    \"To build a Docker image for a FastKafka project, we need the following items:\\n\",\n    \"\\n\",\n    \"1. A library that is built using FastKafka.\\n\",\n    \"2. A file in which the requirements are specified. This could be a requirements.txt file, a setup.py file, or even a wheel file.\\n\",\n    \"3. A Dockerfile to build an image that will include the two files mentioned above.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"8e7b9e7a\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Creating FastKafka Code\\n\",\n    \"\\n\",\n    \"Let's create a `FastKafka`-based application and write it to the `application.py` file based on the [tutorial](/docs#tutorial).\\n\",\n    \"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file\\n\",\n    \"\\n\",\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from pydantic import BaseModel, 
NonNegativeFloat, Field\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = 
ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\\n\",\n    \"\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"44a9370f\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Creating requirements.txt file\\n\",\n    \"\\n\",\n    \"The above code only requires FastKafka. So, we will add only that to the `requirements.txt` file, but you can add additional requirements to it as well.\\n\",\n    \"\\n\",\n    \"```txt\\n\",\n    \"fastkafka>=0.3.0\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Here we are using `requirements.txt` to store the project's dependencies. However, other methods like `setup.py`, `pipenv`, and `wheel` files can also be used. `setup.py` is commonly used for packaging and distributing Python modules, while `pipenv` is a tool used for managing virtual environments and package dependencies. 
`wheel` files are built distributions of Python packages that can be installed with pip.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"911436ab\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Creating Dockerfile\\n\",\n    \"\\n\",\n    \"```{ .dockerfile .annotate }\\n\",\n    \"# (1)\\n\",\n    \"FROM python:3.9-slim-bullseye\\n\",\n    \"# (2)\\n\",\n    \"WORKDIR /project\\n\",\n    \"# (3)\\n\",\n    \"COPY application.py requirements.txt /project/\\n\",\n    \"# (4)\\n\",\n    \"RUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\\n\",\n    \"# (5)\\n\",\n    \"CMD [\\\"fastkafka\\\", \\\"run\\\", \\\"--num-workers\\\", \\\"2\\\", \\\"--kafka-broker\\\", \\\"production\\\", \\\"application:kafka_app\\\"]\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"1. Start from the official Python base image.\\n\",\n    \"\\n\",\n    \"2. Set the current working directory to `/project`.\\n\",\n    \"\\n\",\n    \"    This is where we'll put the `requirements.txt` file and the `application.py` file.\\n\",\n    \"\\n\",\n    \"3. Copy the `application.py` file and `requirements.txt` file inside the `/project` directory.\\n\",\n    \"\\n\",\n    \"4. Install the package dependencies in the requirements file.\\n\",\n    \"\\n\",\n    \"    The `--no-cache-dir` option tells `pip` to not save the downloaded packages locally, as that is only if `pip` was going to be run again to install the same packages, but that's not the case when working with containers.\\n\",\n    \"\\n\",\n    \"    The `--upgrade` option tells `pip` to upgrade the packages if they are already installed.\\n\",\n    \"\\n\",\n    \"5. 
Set the **command** to run the `fastkafka run` command.\\n\",\n    \"\\n\",\n    \"    `CMD` takes a list of strings, each of these strings is what you would type in the command line separated by spaces.\\n\",\n    \"\\n\",\n    \"    This command will be run from the **current working directory**, the same `/project` directory you set above with `WORKDIR /project`.\\n\",\n    \"\\n\",\n    \"    We supply additional parameters `--num-workers` and `--kafka-broker` for the run command. Finally, we specify the location of our FastKafka application as a command argument.\\n\",\n    \"    \\n\",\n    \"    To learn more about `fastkafka run` command please check the [CLI docs](../../cli/fastkafka/#fastkafka-run).\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"2ad51d39\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Build the Docker Image\\n\",\n    \"\\n\",\n    \"Now that all the files are in place, let's build the container image.\\n\",\n    \"\\n\",\n    \"1. Go to the project directory (where your `Dockerfile` is, containing your `application.py` file).\\n\",\n    \"2. Run the following command to build the image:\\n\",\n    \"    \\n\",\n    \"    ```cmd\\n\",\n    \"    docker build -t fastkafka_project_image .\\n\",\n    \"    ```\\n\",\n    \"    \\n\",\n    \"    This command will create a docker image with the name `fastkafka_project_image` and the `latest` tag.\\n\",\n    \"   \\n\",\n    \"That's it! 
You have now built a docker image for your FastKafka project.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"bfe73a22\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Start the Docker Container\\n\",\n    \"\\n\",\n    \"Run a container based on the built image:\\n\",\n    \"```cmd\\n\",\n    \"docker run -d --name fastkafka_project_container fastkafka_project_image\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"eec10a57\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Additional Security\\n\",\n    \"\\n\",\n    \"`Trivy` is an open-source tool that scans Docker images for vulnerabilities. It can be integrated into your CI/CD pipeline to ensure that your images are secure and free from known vulnerabilities. Here's how you can use `trivy` to scan your `fastkafka_project_image`:\\n\",\n    \"\\n\",\n    \"1. Install `trivy` on your local machine by following the instructions provided in the [official `trivy` documentation](https://aquasecurity.github.io/trivy/latest/getting-started/installation/).\\n\",\n    \"\\n\",\n    \"2. Run the following command to scan your fastkafka_project_image:\\n\",\n    \"    \\n\",\n    \"    ```cmd\\n\",\n    \"    trivy image fastkafka_project_image\\n\",\n    \"    ```\\n\",\n    \"    \\n\",\n    \"    This command will scan your `fastkafka_project_image` for any vulnerabilities and provide you with a report of its findings.\\n\",\n    \"\\n\",\n    \"3. Fix any vulnerabilities identified by `trivy`. You can do this by updating the vulnerable package to a more secure version or by using a different package altogether.\\n\",\n    \"\\n\",\n    \"4. 
Rebuild your `fastkafka_project_image` and repeat steps 2 and 3 until `trivy` reports no vulnerabilities.\\n\",\n    \"\\n\",\n    \"By using `trivy` to scan your Docker images, you can ensure that your containers are secure and free from known vulnerabilities.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9e2f403c\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Example repo\\n\",\n    \"\\n\",\n    \"A `FastKafka` based library which uses the above-mentioned Dockerfile to build a docker image can be found [here](https://github.com/airtai/sample_fastkafka_project/)\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_31_Using_redpanda_to_test_fastkafka.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import inspect\\n\",\n    \"import os\\n\",\n    \"import shutil\\n\",\n    \"from pathlib import Path\\n\",\n    \"from pprint import pprint\\n\",\n    \"from tempfile import TemporaryDirectory\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"import pytest\\n\",\n    \"from aiokafka import AIOKafkaProducer\\n\",\n    \"from fastcore.basics import patch\\n\",\n    \"from IPython.display import Markdown\\n\",\n    \"\\n\",\n    \"from fastkafka._helpers import get_collapsible_admonition, source2markdown\\n\",\n    \"from fastkafka.testing import mock_AIOKafkaProducer_send, run_script_and_cancel\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Using Redpanda to test FastKafka\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## What is FastKafka?\\n\",\n    \"\\n\",\n    \"[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use Python library for building asynchronous services that interact with Kafka topics. 
Built on top of [Pydantic](https://docs.pydantic.dev/), [AIOKafka](https://github.com/aio-libs/aiokafka) and [AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process of writing producers and consumers for Kafka topics, handling all the parsing, networking, task scheduling and data generation automatically. With FastKafka, you can quickly prototype and develop high-performance Kafka-based services with minimal code, making it an ideal choice for developers looking to streamline their workflow and accelerate their projects.\\n\",\n    \"\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## What is Redpanda?\\n\",\n    \"\\n\",\n    \"Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools work out of the box with Redpanda.\\n\",\n    \"\\n\",\n    \"From [redpanda.com](https://redpanda.com/):\\n\",\n    \"\\n\",\n    \"> Redpanda is a Kafka®-compatible streaming data platform that is proven to be 10x faster and 6x lower in total costs. It is also JVM-free, ZooKeeper®-free, Jepsen-tested and source available.\\n\",\n    \"\\n\",\n    \"Some of the advantages of Redpanda over Kafka are\\n\",\n    \"\\n\",\n    \"1. A single binary with built-in everything, no ZooKeeper® or JVM needed.\\n\",\n    \"2. Costs up to 6X less than Kafka.\\n\",\n    \"3. 
Up to 10x lower average latencies and up to 6x faster Kafka transactions without compromising correctness.\\n\",\n    \"\\n\",\n    \"To learn more about Redpanda, please visit their [website](https://redpanda.com/) or checkout this [blog post](https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark) comparing Redpanda and Kafka's performance benchmarks.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Example repo\\n\",\n    \"\\n\",\n    \"A sample FastKafka-based library that uses Redpanda for testing, based on this guide, can be found [here](https://github.com/airtai/sample_fastkafka_with_redpanda).\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## The process\\n\",\n    \"\\n\",\n    \"Here are the steps we’ll be walking through to build our example:\\n\",\n    \"\\n\",\n    \"1. Set up the prerequisites.\\n\",\n    \"2. Clone the example repo.\\n\",\n    \"3. Explain how to write an application using FastKafka.\\n\",\n    \"4. Explain how to write a test case to test FastKafka with Redpanda.\\n\",\n    \"5. Run the test case and produce/consume messages.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 1. Prerequisites\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"Before starting, make sure you have the following prerequisites set up:\\n\",\n    \"\\n\",\n    \"1. **Python 3.x**: A Python 3.x installation is required to run FastKafka. You can download the latest version of Python from the [official website](https://www.python.org/downloads/). You'll also need to have pip installed and updated, which is Python's package installer.\\n\",\n    \"2. **Docker Desktop**: Docker is used to run Redpanda, which is required for testing FastKafka. You can download and install Docker Desktop from the [official website](https://www.docker.com/products/docker-desktop/).\\n\",\n    \"3. 
**Git**: You'll need to have Git installed to clone the example repo. You can download Git from the [official website](https://git-scm.com/downloads).\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 2. Cloning and setting up the example repo\\n\",\n    \"\\n\",\n    \"To get started with the example code, clone the [GitHub repository](https://github.com/airtai/sample_fastkafka_with_redpanda) by running the following command in your terminal:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\\n\",\n    \"cd sample_fastkafka_with_redpanda\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This will create a new directory called sample_fastkafka_with_redpanda and download all the necessary files.\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"### Create a virtual environment\\n\",\n    \"\\n\",\n    \"Before writing any code, let’s [create a new virtual environment](https://docs.python.org/3/library/venv.html#module-venv) for our project.\\n\",\n    \"\\n\",\n    \" A virtual environment is an isolated environment for a Python project, which allows you to manage project-specific dependencies and avoid conflicts between different projects.\\n\",\n    \"\\n\",\n    \"To create a new virtual environment, run the following commands in your terminal:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"python3 -m venv venv\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This will create a new directory called `venv` in your project directory, which will contain the virtual environment.\\n\",\n    \"\\n\",\n    \"To activate the virtual environment, run the following command:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"source venv/bin/activate\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This will change your shell's prompt to indicate that you are now working inside the virtual environment.\\n\",\n    \"\\n\",\n    \"Finally, run the following command to upgrade `pip`, the 
Python package installer:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"pip install --upgrade pip\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"### Install Python dependencies\\n\",\n    \"\\n\",\n    \"Next, let's install the required Python dependencies. In this guide, we'll be using `FastKafka` to write our application code and `pytest` and `pytest-asyncio` to test it.\\n\",\n    \"\\n\",\n    \"You can install the dependencies from the `requirements.txt` file provided in the cloned repository by running:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"pip install -r requirements.txt\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This will install all the required packages and their dependencies.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 3. Writing server code\\n\",\n    \"\\n\",\n    \"The `application.py` file in the cloned repository demonstrates how to use FastKafka to consume messages from a Kafka topic, make predictions using a predictive model, and publish the predictions to another Kafka topic. Here is an explanation of the code:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Preparing the demo model\\n\",\n    \"\\n\",\n    \"First we will prepare our model using the Iris dataset so that we can demonstrate the predictions using FastKafka. 
The following call downloads the dataset and trains the model.\\n\",\n    \"\\n\",\n    \"We will wrap the model creation into a lifespan of our app so that the model is created just before the app is started.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from contextlib import asynccontextmanager\\n\",\n    \"\\n\",\n    \"from sklearn.datasets import load_iris\\n\",\n    \"from sklearn.linear_model import LogisticRegression\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"ml_models = {}\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@asynccontextmanager\\n\",\n    \"async def lifespan(app: FastKafka):\\n\",\n    \"    # Load the ML model\\n\",\n    \"    X, y = load_iris(return_X_y=True)\\n\",\n    \"    ml_models[\\\"iris_predictor\\\"] = LogisticRegression(random_state=0, max_iter=500).fit(\\n\",\n    \"        X, y\\n\",\n    \"    )\\n\",\n    \"    yield\\n\",\n    \"    # Clean up the ML models and release the resources\\n\",\n    \"    ml_models.clear()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Messages\\n\",\n    \"\\n\",\n    \"FastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input JSON-encoded data into Python objects, making it easy to work with structured data in your Kafka-based applications. Pydantic's [`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you to define messages using a declarative syntax, making it easy to specify the fields and types of your messages.\\n\",\n    \"\\n\",\n    \"This example defines two message classes for use in a FastKafka application:\\n\",\n    \"\\n\",\n    \"- The `IrisInputData` class is used to represent input data for a predictive model. 
It has four fields of type [`NonNegativeFloat`](https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat), which is a subclass of float that only allows non-negative floating point values.\\n\",\n    \"\\n\",\n    \"- The `IrisPrediction` class is used to represent the output of the predictive model. It has a single field `species` of type string representing the predicted species.\\n\",\n    \"\\n\",\n    \"These message classes will be used to parse and validate incoming data in Kafka consumers and producers.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisInputData(BaseModel):\\n\",\n    \"    sepal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    sepal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Sepal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_length: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal length in cm\\\"\\n\",\n    \"    )\\n\",\n    \"    petal_width: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Petal width in cm\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class IrisPrediction(BaseModel):\\n\",\n    \"    species: str = Field(..., example=\\\"setosa\\\", description=\\\"Predicted species\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Application\\n\",\n    \"\\n\",\n    \"This example shows how to initialize a FastKafka application.\\n\",\n    \"\\n\",\n    \"It starts by defining a dictionary called `kafka_brokers`, which contains two entries: `\\\"localhost\\\"` and `\\\"production\\\"`, specifying local development 
and production Kafka brokers. Each entry specifies the URL, port, and other details of a Kafka broker. This dictionary is used both to generate documentation and to later run the server against one of the given kafka broker.\\n\",\n    \"\\n\",\n    \"Next, an instance of the `FastKafka` class is initialized with the minimum required arguments:\\n\",\n    \"\\n\",\n    \"- `kafka_brokers`: a dictionary used for generating documentation\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Iris predictions\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \"    lifespan=lifespan,\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Function decorators\\n\",\n    \"\\n\",\n    \"FastKafka provides convenient function decorators `@kafka_app.consumes` and `@kafka_app.produces` to allow you to delegate the actual process of\\n\",\n    \"\\n\",\n    \"- consuming and producing data to Kafka, and\\n\",\n    \"\\n\",\n    \"- decoding and encoding JSON encode messages\\n\",\n    \"\\n\",\n    \"from user defined functions to the framework. 
The FastKafka framework delegates these jobs to AIOKafka and Pydantic libraries.\\n\",\n    \"\\n\",\n    \"These decorators make it easy to specify the processing logic for your Kafka consumers and producers, allowing you to focus on the core business logic of your application without worrying about the underlying Kafka integration.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"This following example shows how to use the `@kafka_app.consumes` and `@kafka_app.produces` decorators in a FastKafka application:\\n\",\n    \"\\n\",\n    \"- The `@kafka_app.consumes` decorator is applied to the `on_input_data` function, which specifies that this function should be called whenever a message is received on the \\\"input_data\\\" Kafka topic. The `on_input_data` function takes a single argument which is expected to be an instance of the `IrisInputData` message class. Specifying the type of the single argument is instructing the Pydantic to use `IrisInputData.parse_raw()` on the consumed message before passing it to the user defined function `on_input_data`.\\n\",\n    \"\\n\",\n    \"- The `@produces` decorator is applied to the `to_predictions` function, which specifies that this function should produce a message to the \\\"predictions\\\" Kafka topic whenever it is called. The `to_predictions` function takes a single integer argument `species_class` representing one of three possible strign values predicted by the mdoel. It creates a new `IrisPrediction` message using this value and then returns it. 
The framework will call the `IrisPrediction.json().encode(\\\"utf-8\\\")` function on the returned value and produce it to the specified topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: IrisInputData):\\n\",\n    \"    species_class = ml_models[\\\"iris_predictor\\\"].predict(\\n\",\n    \"        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\\n\",\n    \"    )[0]\\n\",\n    \"\\n\",\n    \"    await to_predictions(species_class)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"predictions\\\")\\n\",\n    \"async def to_predictions(species_class: int) -> IrisPrediction:\\n\",\n    \"    iris_species = [\\\"setosa\\\", \\\"versicolor\\\", \\\"virginica\\\"]\\n\",\n    \"\\n\",\n    \"    prediction = IrisPrediction(species=iris_species[species_class])\\n\",\n    \"    return prediction\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 4. Writing the test code\\n\",\n    \"\\n\",\n    \"The service can be tested using the `Tester` instance which can be configured to start a [Redpanda broker](../../api/fastkafka/testing/LocalRedpandaBroker/) for testing purposes. 
The `test.py` file in the cloned repository contains the following code for testing.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import pytest\\n\",\n    \"from application import IrisInputData, IrisPrediction, kafka_app\\n\",\n    \"\\n\",\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"msg = IrisInputData(\\n\",\n    \"    sepal_length=0.1,\\n\",\n    \"    sepal_width=0.2,\\n\",\n    \"    petal_length=0.3,\\n\",\n    \"    petal_width=0.4,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@pytest.mark.asyncio\\n\",\n    \"async def test():\\n\",\n    \"    # Start Tester app and create local Redpanda broker for testing\\n\",\n    \"    async with Tester(kafka_app).using_local_redpanda(\\n\",\n    \"        tag=\\\"v23.1.2\\\", listener_port=9092\\n\",\n    \"    ) as tester:\\n\",\n    \"        # Send IrisInputData message to input_data topic\\n\",\n    \"        await tester.to_input_data(msg)\\n\",\n    \"\\n\",\n    \"        # Assert that the kafka_app responded with IrisPrediction in predictions topic\\n\",\n    \"        await tester.awaited_mocks.on_predictions.assert_awaited_with(\\n\",\n    \"            IrisPrediction(species=\\\"setosa\\\"), timeout=2\\n\",\n    \"        )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"The `Tester` module utilizes uses `LocalRedpandaBroker` to start and stop a Redpanda broker for testing purposes using Docker\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 5. Running the tests\\n\",\n    \"\\n\",\n    \"We can run the tests which is in `test.py` file by executing the following command:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"pytest test.py\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"This will start a Redpanda broker using Docker and executes tests. 
The output of the command is:\\n\",\n    \"\\n\",\n    \"```cmd\\n\",\n    \"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\\n\",\n    \"============================== test session starts ===============================\\n\",\n    \"platform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\\n\",\n    \"rootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\\n\",\n    \"plugins: asyncio-0.21.0, anyio-3.6.2\\n\",\n    \"asyncio: mode=strict\\n\",\n    \"collected 1 item                                                                 \\n\",\n    \"\\n\",\n    \"test.py .                                                                  [100%]\\n\",\n    \"\\n\",\n    \"=============================== 1 passed in 7.28s ================================\\n\",\n    \"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"Running the tests with the Redpanda broker ensures that your code is working correctly with a real Kafka-like message broker, making your tests more reliable. \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Recap\\n\",\n    \"\\n\",\n    \"We have created an Iris classification model and encapulated it into our `FastKafka` application.\\n\",\n    \"The app will consume the `IrisInputData` from the `input_data` topic and produce the predictions to `predictions` topic.\\n\",\n    \"\\n\",\n    \"To test the app we have:\\n\",\n    \"\\n\",\n    \"1. Created the app\\n\",\n    \"\\n\",\n    \"2. Started our `Tester` class with `Redpanda` broker which mirrors the developed app topics for testing purposes\\n\",\n    \"\\n\",\n    \"3. Sent `IrisInputData` message to `input_data` topic\\n\",\n    \"\\n\",\n    \"4. 
Asserted and checked that the developed iris classification service has reacted to `IrisInputData` message \"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 1\n}\n"
  },
  {
    "path": "nbs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"650462ec\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Using FastAPI to Run FastKafka Application\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"21a8701d\",\n   \"metadata\": {},\n   \"source\": [\n    \"When deploying a FastKafka application, the default approach is to utilize the [`fastkafka run`](/docs/cli/fastkafka#fastkafka-run) CLI command. This command allows you to launch your FastKafka application as a standalone service. However, if you already have a FastAPI application in place and wish to run FastKafka application alongside it, you have an alternative option.\\n\",\n    \"\\n\",\n    \"FastKafka provides a method called `FastKafka.fastapi_lifespan` that leverages [FastAPI's lifespan](https://fastapi.tiangolo.com/advanced/events/#lifespan-events) feature. This method allows you to run your FastKafka application together with your existing FastAPI app, seamlessly integrating their functionalities. By using the `FastKafka.fastapi_lifespan` method, you can start the FastKafka application within the same process as the FastAPI app.\\n\",\n    \"\\n\",\n    \"The `FastKafka.fastapi_lifespan` method ensures that both FastAPI and FastKafka are initialized and start working simultaneously. This approach enables the execution of Kafka-related tasks, such as producing and consuming messages, while also handling HTTP requests through FastAPI's routes.\\n\",\n    \"\\n\",\n    \"By combining FastAPI and FastKafka in this manner, you can build a comprehensive application that harnesses the power of both frameworks. 
Whether you require real-time messaging capabilities or traditional HTTP endpoints, this approach allows you to leverage the strengths of FastAPI and FastKafka within a single deployment setup.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"89f5a924\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Prerequisites\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"1. A basic knowledge of `FastKafka` is needed to proceed with this guide. If you are not familiar with `FastKafka`, please go through the [tutorial](/docs#tutorial) first.\\n\",\n    \"2. `FastKafka` and `FastAPI` libraries needs to be installed.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"7af8a037\",\n   \"metadata\": {},\n   \"source\": [\n    \"This guide will provide a step-by-step explanation, taking you through each stage individually, before combining all the components in the final section for a comprehensive understanding of the process.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"40ecc6d5\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 1. 
Basic FastKafka app\\n\",\n    \"\\n\",\n    \"In this step, we will begin by creating a simple FastKafka application.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"abd847c7\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Greetings\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes()\\n\",\n    \"async def on_names(msg: TestMsg):\\n\",\n    \"    await to_greetings(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces()\\n\",\n    \"async def to_greetings(greeting: TestMsg) -> TestMsg:\\n\",\n    \"    return greeting\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f7328cb1\",\n   \"metadata\": {},\n   \"source\": [\n    \"In the above example, we consume messages from a topic called `names`, we prepend \\\"Hello\\\" to the message, and send it back to another topic called 
`greetings`.\\n\",\n    \"\\n\",\n    \"We now have a simple `FastKafka` app to produce and consume from two topics.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1185a1df\",\n   \"metadata\": {},\n   \"source\": [\n    \"## 2. Using fastapi_lifespan method\\n\",\n    \"\\n\",\n    \"In this step of the guide, we will explore the integration of a FastKafka application with a FastAPI application using the `FastKafka.fastapi_lifespan` method. \\n\",\n    \"The `FastKafka.fastapi_lifespan` method is a feature provided by FastKafka, which allows you to seamlessly integrate a FastKafka application with a FastAPI application by leveraging FastAPI's lifespan feature.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"1e9cfed2\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from fastapi import FastAPI\\n\",\n    \"\\n\",\n    \"fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name=\\\"localhost\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@fastapi_app.get(\\\"/hello\\\")\\n\",\n    \"async def hello():\\n\",\n    \"    return {\\\"msg\\\": \\\"hello there\\\"}\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"144e4767\",\n   \"metadata\": {},\n   \"source\": [\n    \"In the above example, a new instance of the `FastAPI` app is created, and when the app is started using uvicorn, it also runs the `FastKafka` application concurrently.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"9a9a0851\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Putting it all together\\n\",\n    \"\\n\",\n    \"Let's put the above code together and write it in a file called `fast_apps.py`.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"856a25e9\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"# content of the 
\\\"fast_apps.py\\\" file\\n\",\n       \"\\n\",\n       \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n       \"from typing import *\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"    },\\n\",\n       \"    \\\"production\\\": {\\n\",\n       \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n       \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n       \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Greetings\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"class TestMsg(BaseModel):\\n\",\n       \"    msg: str = Field(...)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.consumes()\\n\",\n       \"async def on_names(msg: TestMsg):\\n\",\n       \"    await to_greetings(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.produces()\\n\",\n       \"async def to_greetings(greeting: TestMsg) -> TestMsg:\\n\",\n       \"    return greeting\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"from fastapi import FastAPI\\n\",\n       \"\\n\",\n       \"fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(\\\"localhost\\\"))\\n\",\n       \"\\n\",\n       \"@fastapi_app.get(\\\"/hello\\\")\\n\",\n       \"async def hello():\\n\",\n       \"    return {\\\"msg\\\": \\\"hello there\\\"}\\n\",\n       \"\\n\",\n       \"```\\n\"\n      ],\n      
\"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"from IPython.display import Markdown\\n\",\n    \"\\n\",\n    \"kafka_app_source = \\\"\\\"\\\"\\n\",\n    \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"from typing import *\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Greetings\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes()\\n\",\n    \"async def on_names(msg: TestMsg):\\n\",\n    \"    await to_greetings(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces()\\n\",\n    \"async def to_greetings(greeting: TestMsg) -> TestMsg:\\n\",\n    \"    return greeting\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from fastapi import FastAPI\\n\",\n    \"\\n\",\n    \"fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name=\\\"localhost\\\"))\\n\",\n    \"\\n\",\n    
\"@fastapi_app.get(\\\"/hello\\\")\\n\",\n    \"async def hello():\\n\",\n    \"    return {\\\"msg\\\": \\\"hello there\\\"}\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"with open(\\\"fast_apps.py\\\", \\\"w\\\") as source:\\n\",\n    \"    source.write(kafka_app_source)\\n\",\n    \"\\n\",\n    \"Markdown(\\n\",\n    \"    f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"fast_apps.py\\\" file\\n\",\n    \"{kafka_app_source}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"cc797d12\",\n   \"metadata\": {},\n   \"source\": [\n    \"Finally, you can run the FastAPI application using a web server of your choice, such as Uvicorn or Hypercorn by running the below command:\\n\",\n    \"```cmd\\n\",\n    \"uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"289aff74\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-19 12:44:13.150 [INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"23-05-19 12:44:13.151 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-05-19 12:44:13.151 [INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"23-05-19 12:44:13.152 [INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"23-05-19 12:44:13.153 [INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"23-05-19 12:44:13.767 [INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\\n\",\n      \"23-05-19 12:44:15.435 [INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"23-05-19 12:44:16.894 [INFO] fastkafka._application.app: 
_create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\\n\",\n      \"23-05-19 12:44:16.908 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\\n\",\n      \"23-05-19 12:44:16.909 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-05-19 12:44:16.909 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'app_for_tester_group', 'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\\n\",\n      \"23-05-19 12:44:16.914 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-05-19 12:44:16.915 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\\n\",\n      \"23-05-19 12:44:16.915 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-05-19 12:44:16.916 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'names'})\\n\",\n      \"23-05-19 12:44:16.916 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'names'}\\n\",\n      \"23-05-19 12:44:16.916 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-05-19 12:44:16.921 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-05-19 12:44:16.921 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'greetings'})\\n\",\n      \"23-05-19 12:44:16.922 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'greetings'}\\n\",\n      
\"23-05-19 12:44:16.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-05-19 12:44:16.923 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group app_for_tester_group\\n\",\n      \"23-05-19 12:44:16.923 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group app_for_tester_group\\n\",\n      \"23-05-19 12:44:16.923 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group app_for_tester_group\\n\",\n      \"23-05-19 12:44:16.926 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'greetings': 1}. \\n\",\n      \"23-05-19 12:44:16.941 [INFO] aiokafka.consumer.group_coordinator: Joined group 'app_for_tester_group' (generation 1) with member_id aiokafka-0.8.0-f6d0234a-2fdd-420c-9770-f521138d7ba4\\n\",\n      \"23-05-19 12:44:16.942 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\\n\",\n      \"23-05-19 12:44:16.975 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group app_for_tester_group with generation 1\\n\",\n      \"23-05-19 12:44:16.976 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='names', partition=0)} for group app_for_tester_group\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Started server process [574015]\\n\",\n      \"INFO:     Waiting for application startup.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-19 12:44:19.946 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\\n\",\n      \"23-05-19 12:44:19.947 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'localhost:9092'}'\\n\",\n      \"23-05-19 12:44:19.953 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-05-19 12:44:19.954 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Application startup complete.\\n\",\n      \"INFO:     Uvicorn running on http://0.0.0.0:8080 (Press CTRL+C to quit)\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-19 12:44:19.961 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-05-19 12:44:19.961 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'names'})\\n\",\n      \"23-05-19 12:44:19.962 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'names'}\\n\",\n      \"23-05-19 12:44:19.963 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-05-19 12:44:19.966 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'names': 1}. 
\\n\",\n      \"INFO:     127.0.0.1:56272 - \\\"GET /hello HTTP/1.1\\\" 200 OK\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Shutting down\\n\",\n      \"INFO:     Waiting for application shutdown.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-19 12:44:26.148 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-05-19 12:44:26.149 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:     Application shutdown complete.\\n\",\n      \"INFO:     Finished server process [574015]\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-19 12:44:26.157 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-05-19 12:44:26.157 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-05-19 12:44:26.266 [INFO] aiokafka.consumer.group_coordinator: LeaveGroup request succeeded\\n\",\n      \"23-05-19 12:44:26.268 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-05-19 12:44:26.268 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-05-19 12:44:26.269 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 572846...\\n\",\n      \"23-05-19 12:44:27.853 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 572846 terminated.\\n\",\n      \"23-05-19 12:44:27.854 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 
572474...\\n\",\n      \"23-05-19 12:44:29.183 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 572474 terminated.\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"from fastkafka.testing import Tester\\n\",\n    \"from fastkafka._server import run_in_process\\n\",\n    \"import uvicorn\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"import asyncio\\n\",\n    \"import requests\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def run_uvicorn():\\n\",\n    \"    uvicorn.run(\\n\",\n    \"        \\\"fast_apps:fastapi_app\\\",\\n\",\n    \"        host=\\\"0.0.0.0\\\",\\n\",\n    \"        port=8080,\\n\",\n    \"        reload=False,\\n\",\n    \"        log_level=\\\"debug\\\",\\n\",\n    \"        workers=1,\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"app_for_tester = FastKafka(\\n\",\n    \"    kafka_brokers=dict(localhost=dict(url=\\\"localhost\\\", port=9092)),\\n\",\n    \"    group_id=\\\"app_for_tester_group\\\",\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app_for_tester.consumes(topic=\\\"names\\\")\\n\",\n    \"async def on_app_for_tester_names(msg: TestMsg):\\n\",\n    \"    await to_app_for_tester_greetings(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app_for_tester.produces(topic=\\\"greetings\\\")\\n\",\n    \"async def to_app_for_tester_greetings(greeting: TestMsg) -> TestMsg:\\n\",\n    \"    return greeting\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async with Tester(app_for_tester) as tester:\\n\",\n    \"    with run_in_process(run_uvicorn) as p:\\n\",\n    \"        await asyncio.sleep(3)\\n\",\n    \"        res = requests.get(\\\"http://127.0.0.1:8080/hello\\\")\\n\",\n    \"        assert res.ok\\n\",\n    \"\\n\",\n    \"        await tester.to_names(TestMsg(msg=f\\\"signal 10\\\"))\\n\",\n    \"        
await asyncio.sleep(3)\\n\",\n    \"        assert (\\n\",\n    \"            tester.mocks.on_greetings.call_count == 2\\n\",\n    \"        ), tester.mocks.on_greetings.call_count\\n\",\n    \"\\n\",\n    \"    p.close()\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"a6d56fba\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/guides/Guide_33_Using_Tester_class_to_test_fastkafka.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d99f1cb7\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Using Tester to test FastKafka\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"675bd571\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"from fastkafka._application.app import FastKafka\\n\",\n    \"from fastkafka._application.tester import Tester\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"from typing import List, Optional\\n\",\n    \"from IPython.display import Markdown, display_markdown\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"77293df7\",\n   \"metadata\": {},\n   \"source\": [\n    \"In order to speed up development and make testing easier, we have implemented the [Tester](../../api/fastkafka/testing/Tester/) class. \\n\",\n    \"\\n\",\n    \"The [Tester](../../api/fastkafka/testing/Tester/) instance starts in-memory implementation of Kafka broker i.e. there is no need for starting localhost Kafka service for testing FastKafka apps.\\n\",\n    \"The [Tester](../../api/fastkafka/testing/Tester/) will redirect `consumes` and `produces` decorated functions to the in-memory Kafka broker so that you can quickly test FasKafka apps without the need of a running Kafka broker and all its dependencies. Also, for each FastKafka `consumes` and `produces` function, [Tester](../../api/fastkafka/testing/Tester/) will create it's mirrored fuction i.e. 
if the `consumes` function is implemented, the [Tester](../../api/fastkafka/testing/Tester/) will create the `produces` function (and the other way - if the `produces` function is implemented, [Tester](../../api/fastkafka/testing/Tester/) will create `consumes` function).\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"3e9ea6d4\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# allows async calls in notebooks\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9cde8241\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"# | notest\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1278854a\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Basic example\\n\",\n    \"\\n\",\n    \"To showcase the functionalities of FastKafka and illustrate the concepts discussed, we can use a simple test message called `TestMsg`. Here's the definition of the `TestMsg` class:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"c7754a58\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"test_msg = TestMsg(msg=\\\"signal\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d15934a5\",\n   \"metadata\": {},\n   \"source\": [\n    \"In this example we have implemented `FastKafka` app with one `consumes` and one `produces` function. 
`on_input` function consumes messages from the `input` topic and `to_output` function produces messages to the `output` topic.\\n\",\n    \"\\n\",\n    \"**Note**: it is necessary to define parameter and return types in the produces and consumes functions\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"27b9417e\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"app = FastKafka()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_input(msg: TestMsg):\\n\",\n    \"    await to_output(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_output(msg: TestMsg) -> TestMsg:\\n\",\n    \"    return msg\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"4b1ce364\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Testing the application\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c0155687\",\n   \"metadata\": {},\n   \"source\": [\n    \"In this example `app` has implemented `on_input` and `to_output` functions. We can now use [Tester](../../api/fastkafka/testing/Tester/) to create their mirrored functions: `to_input` and `on_output`. \\n\",\n    \"\\n\",\n    \"Testing process for this example could look like this:\\n\",\n    \"\\n\",\n    \"1. `tester` produces the message to the `input` topic\\n\",\n    \"\\n\",\n    \"2. Assert that the `app` consumed the message by calling `on_input` with the accurate argument\\n\",\n    \"\\n\",\n    \"3. Within `on_input` function, `to_output` function is called - and message is produced to the `output` topic\\n\",\n    \"\\n\",\n    \"4. 
Assert that the `tester` consumed the message by calling `on_output` with the accurate argument\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"0371097d\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-07-31 10:38:30.810 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-07-31 10:38:30.811 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-07-31 10:38:30.812 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-31 10:38:30.812 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:30.826 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-31 10:38:30.827 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:30.827 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:30.828 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-31 10:38:30.828 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:30.829 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:30.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:30.830 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input']\\n\",\n      \"23-07-31 10:38:30.830 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:30.835 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:30.835 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-31 10:38:30.836 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:30.836 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:30.836 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:30.837 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output']\\n\",\n      \"23-07-31 10:38:30.837 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:34.828 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:34.828 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:34.829 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:34.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:34.830 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:34.831 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:34.831 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:34.832 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:34.832 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"async with Tester(app).using_inmemory_broker() as tester:\\n\",\n    \"    input_msg = TestMsg(msg=\\\"Mickey\\\")\\n\",\n    \"\\n\",\n    \"    # tester produces message to the input topic\\n\",\n    \"    await tester.to_input(input_msg)\\n\",\n    \"    # previous line is equal to\\n\",\n    \"    # await tester.mirrors[app.on_input](input_msg)\\n\",\n    \"\\n\",\n    \"    # assert that app consumed from the input topic and it was called with the accurate argument\\n\",\n    \"    await app.awaited_mocks.on_input.assert_called_with(\\n\",\n    \"        TestMsg(msg=\\\"Mickey\\\"), timeout=5\\n\",\n    \"    )\\n\",\n    \"    # assert that tester consumed from the output topic and it was called with the accurate argument\\n\",\n    \"    await tester.on_output.assert_called_with(TestMsg(msg=\\\"Hello Mickey\\\"), timeout=5)\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"1b612c56\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Final script\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9b94ed56\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-07-31 10:38:34.855 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-07-31 10:38:34.856 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      
\"23-07-31 10:38:34.856 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-31 10:38:34.857 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:34.871 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"23-07-31 10:38:34.872 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:34.872 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:34.873 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\\n\",\n      \"23-07-31 10:38:34.874 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:34.875 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:34.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:34.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input']\\n\",\n      \"23-07-31 10:38:34.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:34.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:34.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'localhost:9092'}\\n\",\n      \"23-07-31 10:38:34.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:34.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:34.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:34.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output']\\n\",\n      \"23-07-31 10:38:34.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:38.873 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:38.873 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:38.874 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:38.874 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:38.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:38.876 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:38.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:38.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:38.878 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"from fastkafka._application.app import FastKafka\\n\",\n    \"from 
fastkafka._application.tester import Tester\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"app = FastKafka()\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_input(msg: TestMsg):\\n\",\n    \"    await to_output(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_output(msg: TestMsg) -> TestMsg:\\n\",\n    \"    return msg\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def async_tests():\\n\",\n    \"    async with Tester(app).using_inmemory_broker() as tester:\\n\",\n    \"        input_msg = TestMsg(msg=\\\"Mickey\\\")\\n\",\n    \"\\n\",\n    \"        # tester produces message to the input topic\\n\",\n    \"        await tester.to_input(input_msg)\\n\",\n    \"\\n\",\n    \"        # assert that app consumed from the input topic and it was called with the accurate argument\\n\",\n    \"        await app.awaited_mocks.on_input.assert_called_with(\\n\",\n    \"            TestMsg(msg=\\\"Mickey\\\"), timeout=5\\n\",\n    \"        )\\n\",\n    \"        # assert that tester consumed from the output topic and it was called with the accurate argument\\n\",\n    \"        await tester.awaited_mocks.on_output.assert_called_with(\\n\",\n    \"            TestMsg(msg=\\\"Hello Mickey\\\"), timeout=5\\n\",\n    \"        )\\n\",\n    \"    print(\\\"ok\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"if __name__ == \\\"__main__\\\":\\n\",\n    \"    loop = asyncio.get_event_loop()\\n\",\n    \"    loop.run_until_complete(async_tests())\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0ca15a23\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Using external brokers\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"7b91ccd0\",\n   \"metadata\": {},\n   \"source\": [\n    \"If you have 
already running brokers e.g. `kafka_brokers`, you can use [Tester](../../api/fastkafka/testing/Tester/) method `using_external_broker` to set  brokers which will be used in tests. \\n\",\n    \"\\n\",\n    \"The same example as previous but with external `kafka_brokers`:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"92988480\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"# content of the \\\"application_test.py\\\" file\\n\",\n       \"\\n\",\n       \"import asyncio\\n\",\n       \"from fastkafka._application.app import FastKafka\\n\",\n       \"from fastkafka._application.tester import Tester\\n\",\n       \"from pydantic import BaseModel, Field\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"class TestMsg(BaseModel):\\n\",\n       \"    msg: str = Field(...)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"    },\\n\",\n       \"    \\\"production\\\": {\\n\",\n       \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n       \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n       \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"app = FastKafka(\\n\",\n       \"    title=\\\"Demo Kafka app\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"@app.consumes()\\n\",\n       \"async def on_input(msg: TestMsg):\\n\",\n       \"    await to_output(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n       
\"\\n\",\n       \"\\n\",\n       \"@app.produces()\\n\",\n       \"async def to_output(msg: TestMsg) -> TestMsg:\\n\",\n       \"    return msg\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"async def async_tests():\\n\",\n       \"    async with Tester(app).using_external_broker(bootstrap_servers_id=\\\"production\\\") as tester:\\n\",\n       \"        input_msg = TestMsg(msg=\\\"Mickey\\\")\\n\",\n       \"\\n\",\n       \"        # tester produces message to the input topic\\n\",\n       \"        await tester.to_input(input_msg)\\n\",\n       \"\\n\",\n       \"        # assert that app consumed from the input topic and it was called with the accurate argument\\n\",\n       \"        await app.awaited_mocks.on_input.assert_called_with(\\n\",\n       \"            TestMsg(msg=\\\"Mickey\\\"), timeout=5\\n\",\n       \"        )\\n\",\n       \"        # assert that tester consumed from the output topic and it was called with the accurate argument\\n\",\n       \"        await tester.awaited_mocks.on_output.assert_called_with(\\n\",\n       \"            TestMsg(msg=\\\"Hello Mickey\\\"), timeout=5\\n\",\n       \"        )\\n\",\n       \"    print(\\\"ok\\\")\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"if __name__ == \\\"__main__\\\":\\n\",\n       \"    loop = asyncio.get_event_loop()\\n\",\n       \"    loop.run_until_complete(async_tests())\\n\",\n       \"\\n\",\n       \"```\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"kafka_app_source = \\\"\\\"\\\"\\n\",\n    \"import asyncio\\n\",\n    \"from fastkafka._application.app import FastKafka\\n\",\n    \"from fastkafka._application.tester import Tester\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class 
TestMsg(BaseModel):\\n\",\n    \"    msg: str = Field(...)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"app = FastKafka(\\n\",\n    \"    title=\\\"Demo Kafka app\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_input(msg: TestMsg):\\n\",\n    \"    await to_output(TestMsg(msg=f\\\"Hello {msg.msg}\\\"))\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_output(msg: TestMsg) -> TestMsg:\\n\",\n    \"    return msg\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def async_tests():\\n\",\n    \"    async with Tester(app).using_external_broker(bootstrap_servers_id=\\\"production\\\") as tester:\\n\",\n    \"        input_msg = TestMsg(msg=\\\"Mickey\\\")\\n\",\n    \"\\n\",\n    \"        # tester produces message to the input topic\\n\",\n    \"        await tester.to_input(input_msg)\\n\",\n    \"\\n\",\n    \"        # assert that app consumed from the input topic and it was called with the accurate argument\\n\",\n    \"        await app.awaited_mocks.on_input.assert_called_with(\\n\",\n    \"            TestMsg(msg=\\\"Mickey\\\"), timeout=5\\n\",\n    \"        )\\n\",\n    \"        # assert that tester consumed from the output topic and it was called with the accurate argument\\n\",\n    \"        await 
tester.awaited_mocks.on_output.assert_called_with(\\n\",\n    \"            TestMsg(msg=\\\"Hello Mickey\\\"), timeout=5\\n\",\n    \"        )\\n\",\n    \"    print(\\\"ok\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"if __name__ == \\\"__main__\\\":\\n\",\n    \"    loop = asyncio.get_event_loop()\\n\",\n    \"    loop.run_until_complete(async_tests())\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"with open(\\\"application_test.py\\\", \\\"w\\\") as source:\\n\",\n    \"    source.write(kafka_app_source)\\n\",\n    \"\\n\",\n    \"Markdown(\\n\",\n    \"    f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application_test.py\\\" file\\n\",\n    \"{kafka_app_source}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"97f3ea6b\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Example: New Employee app\\n\",\n    \"\\n\",\n    \"In this example, our `app` has one consumes and two produces functions.\\n\",\n    \"\\n\",\n    \"Every time a company hires an `Employee`, some employee data is sent to the `new_employee` topic.\\n\",\n    \"\\n\",\n    \"That's when our application comes into play! The app consumes this data by calling `on_new_employee`. 
Within this function, `to_employee_email` and `to_welcome_message` functions are called - and messages are produced to the `employee_email` and `welcome_message` topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"f7f51841\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"class Employee(BaseModel):\\n\",\n    \"    name: str\\n\",\n    \"    surname: str\\n\",\n    \"    email: Optional[str] = None\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class EmaiMessage(BaseModel):\\n\",\n    \"    sender: str = \\\"info@gmail.com\\\"\\n\",\n    \"    receiver: str\\n\",\n    \"    subject: str\\n\",\n    \"    message: str\\n\",\n    \"\\n\",\n    \"kafka_brokers = dict(localhost=[dict(url=\\\"server_1\\\", port=9092)], production=[dict(url=\\\"production_server_1\\\", port=9092)])\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_new_employee(msg: Employee):\\n\",\n    \"    employee = await to_employee_email(msg)\\n\",\n    \"    await to_welcome_message(employee)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_employee_email(employee: Employee) -> Employee:\\n\",\n    \"    # generate new email\\n\",\n    \"    employee.email = employee.name + \\\".\\\" + employee.surname + \\\"@gmail.com\\\"\\n\",\n    \"    return employee\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_welcome_message(employee: Employee) -> EmaiMessage:\\n\",\n    \"    message = f\\\"Dear {employee.name},\\\\nWelcome to the company\\\"\\n\",\n    \"    return EmaiMessage(receiver=employee.email, subject=\\\"Welcome\\\", message=message)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"7c7c6d45\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Testing the application\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"ddcbb2a0\",\n   
"metadata\": {},\n   \"source\": [\n    \"In this example `app` has implemented `on_new_employee`, `to_employee_email` and `to_welcome_message` functions. We can now use [Tester](../../api/fastkafka/testing/Tester/) to create their mirrored functions: `to_new_employee`, `on_employee_email` and `on_welcome_message`. \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"15592404\",\n   \"metadata\": {},\n   \"source\": [\n    \"Testing process:\\n\",\n    \"\\n\",\n    \"1. `tester` produces message to the `new_employee` topic\\n\",\n    \"\\n\",\n    \"2. Assert that the `app` consumed the message from the `new_employee` topic with the accurate argument\\n\",\n    \"\\n\",\n    \"3. Within `on_new_employee` function, `to_employee_email` and `to_welcome_message` functions are called - and messages are produced to the `employee_email` and `welcome_message` topic\\n\",\n    \"\\n\",\n    \"4. Assert that the `tester` consumed the message by calling `on_employee_email`\\n\",\n    \"\\n\",\n    \"5. 
Assert that the `tester` consumed the message by calling `on_welcome_message`\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"fc0d7c25\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-07-31 10:38:40.069 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-07-31 10:38:40.070 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-07-31 10:38:40.070 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\\n\",\n      \"23-07-31 10:38:40.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:40.071 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\\n\",\n      \"23-07-31 10:38:40.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:40.091 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\\n\",\n      \"23-07-31 10:38:40.091 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:40.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:40.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\\n\",\n      \"23-07-31 10:38:40.093 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched 
start() called()\\n\",\n      \"23-07-31 10:38:40.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:40.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:40.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['new_employee']\\n\",\n      \"23-07-31 10:38:40.095 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:40.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:40.097 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\\n\",\n      \"23-07-31 10:38:40.098 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:40.099 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:40.099 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:40.100 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['employee_email']\\n\",\n      \"23-07-31 10:38:40.100 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:40.101 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:40.101 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
['production_server_1:9092']}\\n\",\n      \"23-07-31 10:38:40.102 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:40.103 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:40.103 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:40.103 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['welcome_message']\\n\",\n      \"23-07-31 10:38:40.104 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:44.092 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:44.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:44.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:44.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:44.094 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:44.095 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:44.095 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:44.096 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:44.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:44.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:44.097 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"localhost\\\"\\n\",\n    \"\\n\",\n    \"async with Tester(app).using_inmemory_broker(bootstrap_servers_id=\\\"production\\\") as tester:\\n\",\n    \"    assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"production\\\"\\n\",\n    \"    assert tester._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"production\\\"\\n\",\n    \"\\n\",\n    \"    # produce the message to new_employee topic\\n\",\n    \"    await tester.to_new_employee(Employee(name=\\\"Mickey\\\", surname=\\\"Mouse\\\"))\\n\",\n    \"    # previous line is equal to:\\n\",\n    \"    # await tester.mirrors[app.on_new_employee](Employee(name=\\\"Mickey\\\", surname=\\\"Mouse\\\"))\\n\",\n    \"\\n\",\n    \"    # Assert app consumed the message\\n\",\n    \"    await app.awaited_mocks.on_new_employee.assert_called_with(\\n\",\n    \"        Employee(name=\\\"Mickey\\\", surname=\\\"Mouse\\\"), timeout=5\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"    # If the previous assert is true (on_new_employee was called),\\n\",\n    \"    # to_employee_email and to_welcome_message were called inside on_new_employee function\\n\",\n    \"\\n\",\n    \"    # Now we can check if these two messages were consumed\\n\",\n    \"    await tester.awaited_mocks.on_employee_email.assert_called(timeout=5)\\n\",\n    \"    await tester.awaited_mocks.on_welcome_message.assert_called(timeout=5)\\n\",\n    \"\\n\",\n    \"assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"localhost\\\"\\n\",\n    \"\\n\",\n    \"print(\\\"ok\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"markdown\",\n   \"id\": \"547d3632\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Final script\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"9da1c56f\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-07-31 10:38:47.045 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"23-07-31 10:38:47.046 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"23-07-31 10:38:47.046 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\\n\",\n      \"23-07-31 10:38:47.047 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:47.048 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\\n\",\n      \"23-07-31 10:38:47.048 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:47.067 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\\n\",\n      \"23-07-31 10:38:47.067 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"23-07-31 10:38:47.068 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:47.070 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\\n\",\n      \"23-07-31 10:38:47.070 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:47.071 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:47.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:47.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['new_employee']\\n\",\n      \"23-07-31 10:38:47.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:47.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:47.073 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\\n\",\n      \"23-07-31 10:38:47.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:47.074 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:47.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:47.075 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['employee_email']\\n\",\n      \"23-07-31 10:38:47.075 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:47.076 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"23-07-31 10:38:47.076 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 
'bootstrap_servers': ['production_server_1:9092']}\\n\",\n      \"23-07-31 10:38:47.076 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"23-07-31 10:38:47.077 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"23-07-31 10:38:47.077 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"23-07-31 10:38:47.078 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['welcome_message']\\n\",\n      \"23-07-31 10:38:47.078 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"23-07-31 10:38:51.068 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:51.069 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:51.069 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:51.070 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:51.070 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:51.071 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 10:38:51.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:51.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"23-07-31 10:38:51.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"23-07-31 10:38:51.073 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-07-31 
10:38:51.073 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:51.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"23-07-31 10:38:51.074 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\",\n      \"ok\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"\\n\",\n    \"import asyncio\\n\",\n    \"from fastkafka._application.app import FastKafka\\n\",\n    \"from fastkafka._application.tester import Tester\\n\",\n    \"from pydantic import BaseModel, Field\\n\",\n    \"from typing import Optional\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class Employee(BaseModel):\\n\",\n    \"    name: str\\n\",\n    \"    surname: str\\n\",\n    \"    email: Optional[str] = None\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class EmaiMessage(BaseModel):\\n\",\n    \"    sender: str = \\\"info@gmail.com\\\"\\n\",\n    \"    receiver: str\\n\",\n    \"    subject: str\\n\",\n    \"    message: str\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"kafka_brokers = dict(localhost=[dict(url=\\\"server_1\\\", port=9092)], production=[dict(url=\\\"production_server_1\\\", port=9092)])\\n\",\n    \"app = FastKafka(kafka_brokers=kafka_brokers)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.consumes()\\n\",\n    \"async def on_new_employee(msg: Employee):\\n\",\n    \"    employee = await to_employee_email(msg)\\n\",\n    \"    await to_welcome_message(employee)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_employee_email(employee: Employee) -> Employee:\\n\",\n    \"    # generate new email\\n\",\n    \"    employee.email = employee.name + \\\".\\\" + employee.surname + \\\"@gmail.com\\\"\\n\",\n    \"    return employee\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@app.produces()\\n\",\n    \"async def to_welcome_message(employee: Employee) -> EmaiMessage:\\n\",\n    \"    message = f\\\"Dear 
{employee.name},\\\\nWelcome to the company\\\"\\n\",\n    \"    return EmaiMessage(receiver=employee.email, subject=\\\"Welcome\\\", message=message)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"async def async_tests():\\n\",\n    \"    assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"localhost\\\"\\n\",\n    \"    \\n\",\n    \"    async with Tester(app).using_inmemory_broker(bootstrap_servers_id=\\\"production\\\") as tester:\\n\",\n    \"        assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"production\\\"\\n\",\n    \"        assert tester._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"production\\\"\\n\",\n    \"    \\n\",\n    \"        # produce the message to new_employee topic\\n\",\n    \"        await tester.to_new_employee(Employee(name=\\\"Mickey\\\", surname=\\\"Mouse\\\"))\\n\",\n    \"        # previous line is equal to:\\n\",\n    \"        # await tester.mirrors[app.on_new_employee](Employee(name=\\\"Mickey\\\", surname=\\\"Mouse\\\"))\\n\",\n    \"\\n\",\n    \"        # Assert app consumed the message\\n\",\n    \"        await app.awaited_mocks.on_new_employee.assert_called_with(\\n\",\n    \"            Employee(name=\\\"Mickey\\\", surname=\\\"Mouse\\\"), timeout=5\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"        # If the the previous assert is true (on_new_employee was called),\\n\",\n    \"        # to_employee_email and to_welcome_message were called inside on_new_employee function\\n\",\n    \"\\n\",\n    \"        # Now we can check if this two messages were consumed\\n\",\n    \"        await tester.awaited_mocks.on_employee_email.assert_called(timeout=5)\\n\",\n    \"        await tester.awaited_mocks.on_welcome_message.assert_called(timeout=5)\\n\",\n    \"    \\n\",\n    \"    assert app._kafka_config[\\\"bootstrap_servers_id\\\"] == \\\"localhost\\\"\\n\",\n    \"    print(\\\"ok\\\")\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"if __name__ == \\\"__main__\\\":\\n\",\n    \"    loop = 
asyncio.get_event_loop()\\n\",\n    \"    loop.run_until_complete(async_tests())\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"id\": \"dca285c3\",\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "nbs/index.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"from IPython.display import Markdown, display_markdown\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"try:\\n\",\n    \"    import google.colab\\n\",\n    \"    in_colab = True\\n\",\n    \"except:\\n\",\n    \"    in_colab = False\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"### If you see this message, you are running in Google Colab\\n\",\n       \"Along with this interactive tutorial the content of this notebook is organized and formatted for documentation purpuoses. \\n\",\n       \"\\n\",\n       \"You can ignore the '# | hide', '# | notest' and '# | echo: false' comments, they are not important for the tutorial.\\n\",\n       \"    \"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | hide\\n\",\n    \"\\n\",\n    \"if in_colab:\\n\",\n    \"    display(Markdown(\\\"\\\"\\\"\\n\",\n    \"### If you see this message, you are running in Google Colab\\n\",\n    \"Along with this interactive tutorial the content of this notebook is organized and formatted for documentation purpuoses. 
\\n\",\n    \"\\n\",\n    \"You can ignore the '# | hide', '# | notest' and '# | echo: false' comments, they are not important for the tutorial.\\n\",\n    \"    \\\"\\\"\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"# This step is necessary to be able to run FastKafka inside Google Colab\\n\",\n    \"\\n\",\n    \"import nest_asyncio\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"nest_asyncio.apply()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# FastKafka\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"<b>Effortless Kafka integration for your web services</b>\\n\",\n    \"\\n\",\n    \"## Deprecation notice\\n\",\n    \"\\n\",\n    \"This project is superceeded by [FastStream](https://github.com/airtai/faststream).\\n\",\n    \"\\n\",\n    \"FastStream is a new package based on the ideas and experiences gained from\\n\",\n    \"[FastKafka](https://github.com/airtai/fastkafka) and\\n\",\n    \"[Propan](https://github.com/lancetnik/propan). 
By joining our forces, we\\n\",\n    \" picked up the best from both packages and created the unified way to write\\n\",\n    \"  services capable of processing streamed data regradless of the underliying protocol.\\n\",\n    \"\\n\",\n    \"  We'll continue to maintain FastKafka package, but new development will be in  [FastStream](https://github.com/airtai/faststream).\\n\",\n    \"If you are starting a new service,  [FastStream](https://github.com/airtai/faststream) is the recommended way to do it.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"\\n\",\n    \"---\\n\",\n    \"\\n\",\n    \"![PyPI](https://img.shields.io/pypi/v/fastkafka)\\n\",\n    \"![PyPI - Downloads](https://img.shields.io/pypi/dm/fastkafka)\\n\",\n    \"![PyPI - Python Version](https://img.shields.io/pypi/pyversions/fastkafka)\\n\",\n    \"\\n\",\n    \"![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml)\\n\",\n    \"![CodeQL](https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg)\\n\",\n    \"![Dependency Review](https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg)\\n\",\n    \"\\n\",\n    \"![GitHub](https://img.shields.io/github/license/airtai/fastkafka)\\n\",\n    \"\\n\",\n    \"---\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"[FastKafka](https://fastkafka.airt.ai/) is a powerful and easy-to-use Python library for building asynchronous services that interact with Kafka topics. Built on top of [Pydantic](https://docs.pydantic.dev/), [AIOKafka](https://github.com/aio-libs/aiokafka) and [AsyncAPI](https://www.asyncapi.com/), FastKafka simplifies the process of writing producers and consumers for Kafka topics, handling all the parsing, networking, task scheduling and data generation automatically. 
With FastKafka, you can quickly prototype and develop high-performance Kafka-based services with minimal code, making it an ideal choice for developers looking to streamline their workflow and accelerate their projects.\\n\",\n    \"\\n\",\n    \"---\\n\",\n    \"\\n\",\n    \"#### ⭐⭐⭐ Stay in touch ⭐⭐⭐\\n\",\n    \"\\n\",\n    \"Please show your support and stay in touch by:\\n\",\n    \"\\n\",\n    \"- giving our [GitHub repository](https://github.com/airtai/fastkafka/) a star, and\\n\",\n    \"\\n\",\n    \"- joining our [Discord server](https://discord.gg/CJWmYpyFbc).\\n\",\n    \"\\n\",\n    \"Your support helps us to stay in touch with you and encourages us to continue developing and improving the library. Thank you for your support!\\n\",\n    \"\\n\",\n    \"---\\n\",\n    \"\\n\",\n    \"#### 🐝🐝🐝 We were busy lately 🐝🐝🐝\\n\",\n    \"\\n\",\n    \"![Activity](https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg \\\"Repobeats analytics image\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Install\\n\",\n    \"\\n\",\n    \"FastKafka works on Windows, macOS, Linux, and most Unix-style operating systems. 
You can install base version of FastKafka with `pip` as usual:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"pip install fastkafka\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"To install FastKafka with testing features please use:\\n\",\n    \"```sh\\n\",\n    \"pip install fastkafka[test]\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"To install FastKafka with asyncapi docs please use:\\n\",\n    \"```sh\\n\",\n    \"pip install fastkafka[docs]\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"To install FastKafka with all the features please use:\\n\",\n    \"```sh\\n\",\n    \"pip install fastkafka[test,docs]\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\\n\",\n      \"Collecting fastkafka[docs,test]\\n\",\n      \"  Downloading fastkafka-0.6.1-py3-none-any.whl (91 kB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m91.1/91.1 kB\\u001b[0m \\u001b[31m5.5 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hCollecting aiokafka>=0.8.0 (from fastkafka[docs,test])\\n\",\n      \"  Downloading aiokafka-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.1 MB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m1.1/1.1 MB\\u001b[0m \\u001b[31m33.6 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hRequirement already satisfied: anyio>=3.0 in /usr/local/lib/python3.10/dist-packages (from fastkafka[docs,test]) (3.6.2)\\n\",\n      \"Collecting asyncer>=0.0.2 (from fastkafka[docs,test])\\n\",\n      \"  Downloading asyncer-0.0.2-py3-none-any.whl (8.3 kB)\\n\",\n      \"Collecting docstring-parser>=0.15 (from 
fastkafka[docs,test])\\n\",\n      \"  Downloading docstring_parser-0.15-py3-none-any.whl (36 kB)\\n\",\n      \"Requirement already satisfied: nest-asyncio>=1.5.6 in /usr/local/lib/python3.10/dist-packages (from fastkafka[docs,test]) (1.5.6)\\n\",\n      \"Requirement already satisfied: pydantic>=1.9 in /usr/local/lib/python3.10/dist-packages (from fastkafka[docs,test]) (1.10.7)\\n\",\n      \"Requirement already satisfied: tqdm>=4.62 in /usr/local/lib/python3.10/dist-packages (from fastkafka[docs,test]) (4.65.0)\\n\",\n      \"Requirement already satisfied: typer>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from fastkafka[docs,test]) (0.7.0)\\n\",\n      \"Collecting install-jdk==0.3.0 (from fastkafka[docs,test])\\n\",\n      \"  Downloading install-jdk-0.3.0.tar.gz (3.8 kB)\\n\",\n      \"  Preparing metadata (setup.py) ... \\u001b[?25l\\u001b[?25hdone\\n\",\n      \"Collecting ipywidgets<=8.0.4,>=8.0 (from fastkafka[docs,test])\\n\",\n      \"  Downloading ipywidgets-8.0.4-py3-none-any.whl (137 kB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m137.8/137.8 kB\\u001b[0m \\u001b[31m14.1 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hRequirement already satisfied: requests>=2.20 in /usr/local/lib/python3.10/dist-packages (from fastkafka[docs,test]) (2.27.1)\\n\",\n      \"Requirement already satisfied: PyYAML>=5.3.1 in /usr/local/lib/python3.10/dist-packages (from fastkafka[docs,test]) (6.0)\\n\",\n      \"Collecting aiohttp>=3.8.4 (from fastkafka[docs,test])\\n\",\n      \"  Downloading aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.0 MB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m1.0/1.0 MB\\u001b[0m \\u001b[31m51.2 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hRequirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from 
aiohttp>=3.8.4->fastkafka[docs,test]) (23.1.0)\\n\",\n      \"Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp>=3.8.4->fastkafka[docs,test]) (2.0.12)\\n\",\n      \"Collecting multidict<7.0,>=4.5 (from aiohttp>=3.8.4->fastkafka[docs,test])\\n\",\n      \"  Downloading multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (114 kB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m114.5/114.5 kB\\u001b[0m \\u001b[31m4.6 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hCollecting async-timeout<5.0,>=4.0.0a3 (from aiohttp>=3.8.4->fastkafka[docs,test])\\n\",\n      \"  Downloading async_timeout-4.0.2-py3-none-any.whl (5.8 kB)\\n\",\n      \"Collecting yarl<2.0,>=1.0 (from aiohttp>=3.8.4->fastkafka[docs,test])\\n\",\n      \"  Downloading yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (268 kB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m268.8/268.8 kB\\u001b[0m \\u001b[31m23.0 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hCollecting frozenlist>=1.1.1 (from aiohttp>=3.8.4->fastkafka[docs,test])\\n\",\n      \"  Downloading frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (149 kB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m149.6/149.6 kB\\u001b[0m \\u001b[31m15.5 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hCollecting aiosignal>=1.1.2 (from aiohttp>=3.8.4->fastkafka[docs,test])\\n\",\n      \"  Downloading aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\\n\",\n      \"Collecting kafka-python>=2.0.2 (from aiokafka>=0.8.0->fastkafka[docs,test])\\n\",\n      \"  Downloading kafka_python-2.0.2-py2.py3-none-any.whl (246 kB)\\n\",\n      \"\\u001b[2K     
\\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m246.5/246.5 kB\\u001b[0m \\u001b[31m26.1 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hRequirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from aiokafka>=0.8.0->fastkafka[docs,test]) (23.1)\\n\",\n      \"Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.10/dist-packages (from anyio>=3.0->fastkafka[docs,test]) (3.4)\\n\",\n      \"Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.10/dist-packages (from anyio>=3.0->fastkafka[docs,test]) (1.3.0)\\n\",\n      \"Requirement already satisfied: ipykernel>=4.5.1 in /usr/local/lib/python3.10/dist-packages (from ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (5.5.6)\\n\",\n      \"Requirement already satisfied: ipython>=6.1.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (7.34.0)\\n\",\n      \"Requirement already satisfied: traitlets>=4.3.1 in /usr/local/lib/python3.10/dist-packages (from ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (5.7.1)\\n\",\n      \"Collecting widgetsnbextension~=4.0 (from ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test])\\n\",\n      \"  Downloading widgetsnbextension-4.0.7-py3-none-any.whl (2.1 MB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m2.1/2.1 MB\\u001b[0m \\u001b[31m74.6 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hRequirement already satisfied: jupyterlab-widgets~=3.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (3.0.7)\\n\",\n      \"Requirement already satisfied: typing-extensions>=4.2.0 in /usr/local/lib/python3.10/dist-packages (from pydantic>=1.9->fastkafka[docs,test]) (4.5.0)\\n\",\n      \"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->fastkafka[docs,test]) 
(1.26.15)\\n\",\n      \"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->fastkafka[docs,test]) (2022.12.7)\\n\",\n      \"Requirement already satisfied: click<9.0.0,>=7.1.1 in /usr/local/lib/python3.10/dist-packages (from typer>=0.7.0->fastkafka[docs,test]) (8.1.3)\\n\",\n      \"Requirement already satisfied: ipython-genutils in /usr/local/lib/python3.10/dist-packages (from ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (0.2.0)\\n\",\n      \"Requirement already satisfied: jupyter-client in /usr/local/lib/python3.10/dist-packages (from ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (6.1.12)\\n\",\n      \"Requirement already satisfied: tornado>=4.2 in /usr/local/lib/python3.10/dist-packages (from ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (6.3.1)\\n\",\n      \"Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (67.7.2)\\n\",\n      \"Collecting jedi>=0.16 (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test])\\n\",\n      \"  Downloading jedi-0.18.2-py2.py3-none-any.whl (1.6 MB)\\n\",\n      \"\\u001b[2K     \\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\\u001b[0m \\u001b[32m1.6/1.6 MB\\u001b[0m \\u001b[31m74.1 MB/s\\u001b[0m eta \\u001b[36m0:00:00\\u001b[0m\\n\",\n      \"\\u001b[?25hRequirement already satisfied: decorator in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (4.4.2)\\n\",\n      \"Requirement already satisfied: pickleshare in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (0.7.5)\\n\",\n      \"Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from 
ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (3.0.38)\\n\",\n      \"Requirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (2.14.0)\\n\",\n      \"Requirement already satisfied: backcall in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (0.2.0)\\n\",\n      \"Requirement already satisfied: matplotlib-inline in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (0.1.6)\\n\",\n      \"Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (4.8.0)\\n\",\n      \"Requirement already satisfied: parso<0.9.0,>=0.8.0 in /usr/local/lib/python3.10/dist-packages (from jedi>=0.16->ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (0.8.3)\\n\",\n      \"Requirement already satisfied: ptyprocess>=0.5 in /usr/local/lib/python3.10/dist-packages (from pexpect>4.3->ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (0.7.0)\\n\",\n      \"Requirement already satisfied: wcwidth in /usr/local/lib/python3.10/dist-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=6.1.0->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (0.2.6)\\n\",\n      \"Requirement already satisfied: jupyter-core>=4.6.0 in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (5.3.0)\\n\",\n      \"Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (23.2.1)\\n\",\n      \"Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) 
(2.8.2)\\n\",\n      \"Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.10/dist-packages (from jupyter-core>=4.6.0->jupyter-client->ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (3.3.0)\\n\",\n      \"Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.1->jupyter-client->ipykernel>=4.5.1->ipywidgets<=8.0.4,>=8.0->fastkafka[docs,test]) (1.16.0)\\n\",\n      \"Building wheels for collected packages: install-jdk\\n\",\n      \"  Building wheel for install-jdk (setup.py) ... \\u001b[?25l\\u001b[?25hdone\\n\",\n      \"  Created wheel for install-jdk: filename=install_jdk-0.3.0-py3-none-any.whl size=3725 sha256=d5dad71fae09d32b4f9eeeb2f754a45ea49e7f948c48fc2184ccca9b5dd69dd1\\n\",\n      \"  Stored in directory: /root/.cache/pip/wheels/79/7a/47/9a4619174f7ca0f1068edb7a5412730a37365b6d183b0b3847\\n\",\n      \"Successfully built install-jdk\\n\",\n      \"Installing collected packages: kafka-python, widgetsnbextension, multidict, jedi, install-jdk, frozenlist, docstring-parser, async-timeout, yarl, asyncer, aiosignal, aiokafka, fastkafka, aiohttp, ipywidgets\\n\",\n      \"  Attempting uninstall: widgetsnbextension\\n\",\n      \"    Found existing installation: widgetsnbextension 3.6.4\\n\",\n      \"    Uninstalling widgetsnbextension-3.6.4:\\n\",\n      \"      Successfully uninstalled widgetsnbextension-3.6.4\\n\",\n      \"  Attempting uninstall: ipywidgets\\n\",\n      \"    Found existing installation: ipywidgets 7.7.1\\n\",\n      \"    Uninstalling ipywidgets-7.7.1:\\n\",\n      \"      Successfully uninstalled ipywidgets-7.7.1\\n\",\n      \"Successfully installed aiohttp-3.8.4 aiokafka-0.8.0 aiosignal-1.3.1 async-timeout-4.0.2 asyncer-0.0.2 docstring-parser-0.15 fastkafka-0.6.1 frozenlist-1.3.3 install-jdk-0.3.0 ipywidgets-8.0.4 jedi-0.18.2 kafka-python-2.0.2 multidict-6.0.4 widgetsnbextension-4.0.7 yarl-1.9.2\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    
\"# | hide\\n\",\n    \"# | notest\\n\",\n    \"\\n\",\n    \"# Install FastKafka with testing and documentation dependencies\\n\",\n    \"\\n\",\n    \"! pip install fastkafka[test,docs]\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Tutorial\\n\",\n    \"\\n\",\n    \"You can start an interactive tutorial in Google Colab by clicking the button below:\\n\",\n    \"\\n\",\n    \"<a href=\\\"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb\\\" target=”_blank”>\\n\",\n    \"  <img src=\\\"https://colab.research.google.com/assets/colab-badge.svg\\\" alt=\\\"Open in Colab\\\" />\\n\",\n    \"</a>\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Writing server code\\n\",\n    \"\\n\",\n    \"To demonstrate FastKafka simplicity of using `@produces` and `@consumes` decorators, we will focus on a simple app.\\n\",\n    \"\\n\",\n    \"The app will consume JSON messages containing positive floats from one topic, log\\n\",\n    \"them, and then produce incremented values to another topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Messages\\n\",\n    \"\\n\",\n    \"FastKafka uses [Pydantic](https://docs.pydantic.dev/) to parse input JSON-encoded data into Python objects, making it easy to work with structured data in your Kafka-based applications. Pydantic's [`BaseModel`](https://docs.pydantic.dev/usage/models/) class allows you to define messages using a declarative syntax, making it easy to specify the fields and types of your messages.\\n\",\n    \"\\n\",\n    \"This example defines one `Data` mesage class. 
This Class will model the consumed and produced data in our app demo, it contains one `NonNegativeFloat` field `data` that will be logged and \\\"processed\\\" before being produced to another topic.\\n\",\n    \"\\n\",\n    \"These message class will be used to parse and validate incoming data in Kafka consumers and producers.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"class Data(BaseModel):\\n\",\n    \"    data: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Float data example\\\"\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Application\\n\",\n    \"\\n\",\n    \"This example shows how to initialize a FastKafka application.\\n\",\n    \"\\n\",\n    \"It starts by defining  a dictionary called `kafka_brokers`, which contains two entries: `\\\"localhost\\\"` and `\\\"production\\\"`, specifying local development and production Kafka brokers. Each entry specifies the URL, port, and other details of a Kafka broker. 
This dictionary is used for both generating the documentation and later to run the actual server against one of the given kafka broker.\\n\",\n    \"\\n\",\n    \"Next, an object of the `FastKafka` class is initialized with the minimum set of arguments:\\n\",\n    \"\\n\",\n    \"- `kafka_brokers`: a dictionary used for generation of documentation\\n\",\n    \"\\n\",\n    \"We will also import and create a logger so that we can log the incoming data in our consuming function.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from logging import getLogger\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"logger = getLogger(\\\"Demo Kafka app\\\")\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Demo Kafka app\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Function decorators\\n\",\n    \"\\n\",\n    \"FastKafka provides convenient function decorators `@kafka_app.consumes` and `@kafka_app.produces` to allow you to delegate the actual process of\\n\",\n    \"\\n\",\n    \"- consuming and producing data to Kafka, and\\n\",\n    \"\\n\",\n    \"- decoding and 
encoding JSON messages\\n\",\n    \"\\n\",\n    \"from user defined functions to the framework. The FastKafka framework delegates these jobs to AIOKafka and Pydantic libraries.\\n\",\n    \"\\n\",\n    \"These decorators make it easy to specify the processing logic for your Kafka consumers and producers, allowing you to focus on the core business logic of your application without worrying about the underlying Kafka integration.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"This following example shows how to use the `@kafka_app.consumes` and `@kafka_app.produces` decorators in a FastKafka application:\\n\",\n    \"\\n\",\n    \"- The `@kafka_app.consumes` decorator is applied to the `on_input_data` function, which specifies that this function should be called whenever a message is received on the \\\"input_data\\\" Kafka topic. The `on_input_data` function takes a single argument which is expected to be an instance of the `Data` message class. Specifying the type of the single argument is instructing the Pydantic to use `Data.parse_raw()` on the consumed message before passing it to the user defined function `on_input_data`.\\n\",\n    \"\\n\",\n    \"- The `@produces` decorator is applied to the `to_output_data` function, which specifies that this function should produce a message to the \\\"output_data\\\" Kafka topic whenever it is called. The `to_output_data` function takes a single float argument `data`. It it increments the data returns it wrapped in a `Data` object. 
The framework will call the `Data.json().encode(\\\"utf-8\\\")` function on the returned value and produce it to the specified topic.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: Data):\\n\",\n    \"    logger.info(f\\\"Got data: {msg.data}\\\")\\n\",\n    \"    await to_output_data(msg.data)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"output_data\\\")\\n\",\n    \"async def to_output_data(data: float) -> Data:\\n\",\n    \"    processed_data = Data(data=data+1.0)\\n\",\n    \"    return processed_data\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Testing the service\\n\",\n    \"\\n\",\n    \"The service can be tested using the `Tester` instances which internally starts InMemory implementation of Kafka broker.\\n\",\n    \"\\n\",\n    \"The Tester will redirect your consumes and produces decorated functions to the InMemory Kafka broker so that you can quickly test your app without the need for a running Kafka broker and all its dependencies.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\\n\",\n      \"[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[INFO] Demo Kafka app: Got data: 0.1\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\\n\",\n      \"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from fastkafka.testing import Tester\\n\",\n    \"\\n\",\n    \"msg = Data(\\n\",\n    \"    data=0.1,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"# Start Tester app and create InMemory Kafka broker for testing\\n\",\n    \"async with Tester(kafka_app) as tester:\\n\",\n    \"    # Send Data message to input_data topic\\n\",\n    \"    await tester.to_input_data(msg)\\n\",\n    \"\\n\",\n    \"    # Assert that the kafka_app responded with incremented data in output_data topic\\n\",\n    \"    await tester.awaited_mocks.on_output_data.assert_awaited_with(\\n\",\n    \"        Data(data=1.1), timeout=2\\n\",\n    \"    )\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Recap\\n\",\n    \"\\n\",\n    \"We have created a simple FastKafka 
application.\\n\",\n    \"The app will consume the `Data` from the `input_data` topic, log it and produce the incremented data to `output_data` topic.\\n\",\n    \"\\n\",\n    \"To test the app we have:\\n\",\n    \"\\n\",\n    \"1. Created the app\\n\",\n    \"\\n\",\n    \"2. Started our Tester class which mirrors the developed app topics for testing purposes\\n\",\n    \"\\n\",\n    \"3. Sent Data message to `input_data` topic\\n\",\n    \"\\n\",\n    \"4. Asserted and checked that the developed service has reacted to Data message \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Running the service\\n\",\n    \"\\n\",\n    \"The service can be started using builtin faskafka run CLI command. Before we can do that, we will concatenate the code snippets from above and save them in a file `\\\"application.py\\\"`\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"```python\\n\",\n       \"# content of the \\\"application.py\\\" file\\n\",\n       \"\\n\",\n       \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n       \"\\n\",\n       \"from fastkafka import FastKafka\\n\",\n       \"from fastkafka._components.logger import get_logger\\n\",\n       \"\\n\",\n       \"logger = get_logger(__name__)\\n\",\n       \"\\n\",\n       \"class Data(BaseModel):\\n\",\n       \"    data: NonNegativeFloat = Field(\\n\",\n       \"        ..., example=0.5, description=\\\"Float data example\\\"\\n\",\n       \"    )\\n\",\n       \"\\n\",\n       \"kafka_brokers = {\\n\",\n       \"    \\\"localhost\\\": {\\n\",\n       \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n       \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"    },\\n\",\n       \"    \\\"production\\\": {\\n\",\n      
 \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n       \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n       \"        \\\"port\\\": 9092,\\n\",\n       \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n       \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n       \"    },\\n\",\n       \"}\\n\",\n       \"\\n\",\n       \"kafka_app = FastKafka(\\n\",\n       \"    title=\\\"Demo Kafka app\\\",\\n\",\n       \"    kafka_brokers=kafka_brokers,\\n\",\n       \")\\n\",\n       \"\\n\",\n       \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n       \"async def on_input_data(msg: Data):\\n\",\n       \"    logger.info(f\\\"Got data: {msg.data}\\\")\\n\",\n       \"    await to_output_data(msg.data)\\n\",\n       \"\\n\",\n       \"\\n\",\n       \"@kafka_app.produces(topic=\\\"output_data\\\")\\n\",\n       \"async def to_output_data(data: float) -> Data:\\n\",\n       \"    processed_data = Data(data=data+1.0)\\n\",\n       \"    return processed_data\\n\",\n       \"\\n\",\n       \"```\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"kafka_app_source = \\\"\\\"\\\"\\n\",\n    \"from pydantic import BaseModel, Field, NonNegativeFloat\\n\",\n    \"\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"from fastkafka._components.logger import get_logger\\n\",\n    \"\\n\",\n    \"logger = get_logger(__name__)\\n\",\n    \"\\n\",\n    \"class Data(BaseModel):\\n\",\n    \"    data: NonNegativeFloat = Field(\\n\",\n    \"        ..., example=0.5, description=\\\"Float data example\\\"\\n\",\n    \"    )\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": 
\\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    },\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Demo Kafka app\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"@kafka_app.consumes(topic=\\\"input_data\\\", auto_offset_reset=\\\"latest\\\")\\n\",\n    \"async def on_input_data(msg: Data):\\n\",\n    \"    logger.info(f\\\"Got data: {msg.data}\\\")\\n\",\n    \"    await to_output_data(msg.data)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"@kafka_app.produces(topic=\\\"output_data\\\")\\n\",\n    \"async def to_output_data(data: float) -> Data:\\n\",\n    \"    processed_data = Data(data=data+1.0)\\n\",\n    \"    return processed_data\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \"\\n\",\n    \"with open(\\\"application.py\\\", \\\"w\\\") as source:\\n\",\n    \"    source.write(kafka_app_source)\\n\",\n    \"\\n\",\n    \"Markdown(\\n\",\n    \"    f\\\"\\\"\\\"\\n\",\n    \"```python\\n\",\n    \"# content of the \\\"application.py\\\" file\\n\",\n    \"{kafka_app_source}\\n\",\n    \"```\\n\",\n    \"\\\"\\\"\\\"\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-31 11:36:20.813 [INFO] fastkafka._components.test_dependencies: Installing Java...\\n\",\n      \"23-05-31 11:36:20.813 [INFO] fastkafka._components.test_dependencies:  - 
installing jdk...\\n\",\n      \"23-05-31 11:36:29.252 [INFO] fastkafka._components.test_dependencies:  - jdk path: /root/.jdk/jdk-11.0.19+7\\n\",\n      \"23-05-31 11:36:29.252 [INFO] fastkafka._components.test_dependencies: Java installed.\\n\",\n      \"23-05-31 11:36:29.356 [INFO] fastkafka._components.test_dependencies: Installing Kafka...\\n\",\n      \"832969it [00:05, 163016.44it/s]                \\n\",\n      \"23-05-31 11:36:35.571 [INFO] fastkafka._components.test_dependencies: Kafka installed in /root/.local/kafka_2.13-3.3.2.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"# We need to start a local Apache Kafka broker so that our service can connect to it\\n\",\n    \"\\n\",\n    \"# To do that, we need JRE and Kafka toolkit installed.\\n\",\n    \"# We can do the installation by running \\\"fastkafka testing install_deps\\\"\\n\",\n    \"\\n\",\n    \"! fastkafka testing install_deps\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\\n\",\n      \"[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Java is already installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: Kafka is installed.\\n\",\n      \"[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: 
Starting kafka...\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"'127.0.0.1:9092'\"\n      ]\n     },\n     \"execution_count\": null,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"# Start the local Apache Kafka broker\\n\",\n    \"\\n\",\n    \"from fastkafka.testing import ApacheKafkaBroker\\n\",\n    \"\\n\",\n    \"broker = ApacheKafkaBroker(apply_nest_asyncio=True)\\n\",\n    \"\\n\",\n    \"broker.start()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"To run the service, use the FastKafka CLI command and pass the module (in this case, the file where the app implementation is located) and the app simbol to the command.\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\\n\",\n    \"```\\n\",\n    \"\\n\",\n    \"After running the command, you should see the following output in your command line:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"**The cell blow will run your app indefinitely, you will need to stop it manually**\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    
\"if in_colab:\\n\",\n    \"    display(Markdown(f\\\"\\\"\\\"\\n\",\n    \"**The cell blow will run your app indefinitely, you will need to stop it manually**\\n\",\n    \"\\\"\\\"\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\\n\",\n      \"[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\\n\",\n      \"[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\\n\",\n      \"[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\\n\",\n      \"[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\\n\",\n      \"[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\\n\",\n      \"[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'localhost:9092'}'\\n\",\n      \"[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\\n\",\n      \"[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\\n\",\n      \"[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\\n\",\n      \"[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\\n\",\n      \"[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\\n\",\n      \"[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\\n\",\n      \"[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\\n\",\n      \"[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \\n\",\n      \"[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\\n\",\n      \"[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\\n\",\n      \"[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\\n\",\n      \"[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\\n\",\n      \"[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\\n\",\n      \"[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\\n\",\n      \"[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"Starting process cleanup, this may take a few seconds...\\n\",\n      \"23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\\n\",\n      \"23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\\n\",\n      \"[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\\n\",\n      \"[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\\n\",\n      \"23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\\n\",\n      \"23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"# This cell will run your app indefinitely, you need to stop it manually\\n\",\n    \"\\n\",\n    \"!fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 
null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1068...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1068 was already terminated.\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 697...\\n\",\n      \"[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 697 was already terminated.\\n\",\n      \"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"# Stop the local broker to keep the runtime clean\\n\",\n    \"\\n\",\n    \"broker.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Documentation\\n\",\n    \"\\n\",\n    \"The kafka app comes with builtin documentation generation using [AsyncApi HTML generator](https://www.asyncapi.com/tools/generator).\\n\",\n    \"\\n\",\n    \"AsyncApi requires Node.js to be installed and we provide the following convenience command line for it:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka docs install_deps\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"# Update Node.js\\n\",\n       \"When running in Colab, we need to update Node.js first:\\n\",\n       \"    \"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   
],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"if in_colab:\\n\",\n    \"    display(Markdown(\\\"\\\"\\\"\\n\",\n    \"# Update Node.js\\n\",\n    \"When running in Colab, we need to update Node.js first:\\n\",\n    \"    \\\"\\\"\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\u001b[K\\u001b[?25h/tools/node/bin/n -> /tools/node/lib/node_modules/n/bin/n\\n\",\n      \"+ n@9.1.0\\n\",\n      \"added 1 package from 2 contributors in 0.489s\\n\",\n      \"  \\u001b[36minstalling\\u001b[0m : \\u001b[2mnode-v18.16.0\\u001b[0m\\n\",\n      \"  \\u001b[36m     mkdir\\u001b[0m : \\u001b[2m/usr/local/n/versions/node/18.16.0\\u001b[0m\\n\",\n      \"  \\u001b[36m     fetch\\u001b[0m : \\u001b[2mhttps://nodejs.org/dist/v18.16.0/node-v18.16.0-linux-x64.tar.xz\\u001b[0m\\n\",\n      \"######################################################################## 100.0%\\n\",\n      \"\\u001b[1A\\u001b[2K  \\u001b[36m   copying\\u001b[0m : \\u001b[2mnode/18.16.0\\u001b[0m\\n\",\n      \"  \\u001b[36m installed\\u001b[0m : \\u001b[2mv18.16.0 (with npm 9.5.1)\\u001b[0m\\n\",\n      \"\\n\",\n      \"Note: the node command changed location and the old location may be remembered in your current shell.\\n\",\n      \"  \\u001b[36m       old\\u001b[0m : \\u001b[2m/tools/node/bin/node\\u001b[0m\\n\",\n      \"  \\u001b[36m       new\\u001b[0m : \\u001b[2m/usr/local/bin/node\\u001b[0m\\n\",\n      \"If \\\"node --version\\\" shows the old version then start a new shell, or reset the location hash with:\\n\",\n      \"hash -r  (for bash, zsh, ash, dash, and ksh)\\n\",\n      \"rehash   (for csh and tcsh)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"if in_colab:\\n\",\n    \"    !npm install -g n\\n\",\n    \"    
!n lts\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! fastkafka docs install_deps\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"To generate the documentation programatically you just need to call the following command:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka docs generate application:kafka_app\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\\n\",\n      \"23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\\n\",\n      \"23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\\n\",\n      \"23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! ✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/content/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! 
fastkafka docs generate application:kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"This will generate the *asyncapi* folder in relative path where all your documentation will be saved. You can check out the content of it with:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"ls -l asyncapi\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"total 8\\n\",\n      \"drwxr-xr-x 4 root root 4096 May 31 11:38 docs\\n\",\n      \"drwxr-xr-x 2 root root 4096 May 31 11:38 spec\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"! ls -l asyncapi\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"In docs folder you will find the servable static html file of your documentation. This can also be served using our `fastkafka docs serve` CLI command (more on that in our guides).\\n\",\n    \"\\n\",\n    \"In spec folder you will find a asyncapi.yml file containing the async API specification of your application. 
\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"We can locally preview the generated documentation by running the following command:\\n\",\n    \"\\n\",\n    \"```sh\\n\",\n    \"fastkafka docs serve application:kafka_app\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"\\n\",\n       \"### Documentation link when runnining in Colab\\n\",\n       \"After running the cell below, when the cell outputs \\\"Serving documentation on http://127.0.0.1:8000\\\", you can acces your documentation with [this link](https://sd8jqq9m9u-496ff2e9c6d22116-8000-colab.googleusercontent.com/) \\n\",\n       \"\\n\",\n       \"**The cell below will serve docs indefinitely, you will need to stop it manually**\\n\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | hide\\n\",\n    \"\\n\",\n    \"if in_colab:\\n\",\n    \"    port_proxy = google.colab.output.eval_js(f\\\"google.colab.kernel.proxyPort({8000})\\\")\\n\",\n    \"\\n\",\n    \"    display(Markdown(f\\\"\\\"\\\"\\n\",\n    \"### Documentation link when runnining in Colab\\n\",\n    \"After running the cell below, when the cell outputs \\\"Serving documentation on http://127.0.0.1:8000\\\", you can acces your documentation with [this link]({port_proxy}) \\n\",\n    \"\\n\",\n    \"**The cell below will serve docs indefinitely, you will need to stop it manually**\\n\",\n    \"\\\"\\\"\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"23-05-31 11:38:45.250 [INFO] 
fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\\n\",\n      \"23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\\n\",\n      \"23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\\u001b[32m\\n\",\n      \"\\n\",\n      \"Done! ✨\\u001b[0m\\n\",\n      \"\\u001b[33mCheck out your shiny new generated files at \\u001b[0m\\u001b[35m/content/asyncapi/docs\\u001b[0m\\u001b[33m.\\u001b[0m\\n\",\n      \"\\n\",\n      \"\\n\",\n      \"Serving documentation on http://127.0.0.1:8000\\u001b[0m\\n\",\n      \"127.0.0.1 - - [31/May/2023 11:39:14] \\\"GET / HTTP/1.1\\\" 200 -\\n\",\n      \"127.0.0.1 - - [31/May/2023 11:39:14] \\\"GET /css/global.min.css HTTP/1.1\\\" 200 -\\n\",\n      \"127.0.0.1 - - [31/May/2023 11:39:14] \\\"GET /js/asyncapi-ui.min.js HTTP/1.1\\\" 200 -\\n\",\n      \"127.0.0.1 - - [31/May/2023 11:39:14] \\\"GET /css/asyncapi.min.css HTTP/1.1\\\" 200 -\\n\",\n      \"Interupting serving of documentation and cleaning up...\\u001b[0m\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# | notest\\n\",\n    \"# | echo: false\\n\",\n    \"\\n\",\n    \"# This cell will serve docs indefinitely, you need to stop it manually\\n\",\n    \"\\n\",\n    \"!fastkafka docs serve application:kafka_app\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"From the parameters passed to the application constructor, we get the documentation below:\\n\",\n    \"```python\\n\",\n    \"from fastkafka import FastKafka\\n\",\n    \"\\n\",\n    \"kafka_brokers = {\\n\",\n    \"    \\\"localhost\\\": {\\n\",\n    \"        \\\"url\\\": \\\"localhost\\\",\\n\",\n    \"        \\\"description\\\": \\\"local development kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"    
},\\n\",\n    \"    \\\"production\\\": {\\n\",\n    \"        \\\"url\\\": \\\"kafka.airt.ai\\\",\\n\",\n    \"        \\\"description\\\": \\\"production kafka broker\\\",\\n\",\n    \"        \\\"port\\\": 9092,\\n\",\n    \"        \\\"protocol\\\": \\\"kafka-secure\\\",\\n\",\n    \"        \\\"security\\\": {\\\"type\\\": \\\"plain\\\"},\\n\",\n    \"    },\\n\",\n    \"}\\n\",\n    \"\\n\",\n    \"kafka_app = FastKafka(\\n\",\n    \"    title=\\\"Demo Kafka app\\\",\\n\",\n    \"    kafka_brokers=kafka_brokers,\\n\",\n    \")\\n\",\n    \"```\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"The following documentation snippets are for the consumer as specified in the code above:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"\\n\",\n    \"![Kafka_consumer](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"The following documentation snippets are for the producer as specified in the code above:\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"Finally, all messages defined as subclasses of *BaseModel* are documented as well: \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"\\n\",\n    
\"![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## License\\n\",\n    \"\\n\",\n    \"FastKafka is licensed under the Apache License 2.0\\n\",\n    \"\\n\",\n    \"A permissive license whose main conditions require preservation of copyright and license notices. Contributors provide an express grant of patent rights. Licensed works, modifications, and larger works may be distributed under different terms and without source code.\\n\",\n    \"\\n\",\n    \"The full text of the license can be found [here](https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE).\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"python3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 1\n}\n
  },
  {
    "path": "nbs/nbdev.yml",
    "content": "project:\n  output-dir: _docs\n\nwebsite:\n  title: \"fastkafka\"\n  site-url: \"https://airtai.github.io/fastkafka\"\n  description: \"FastKafka is a powerful and easy-to-use Python library for building asynchronous web services that interact with Kafka topics. Built on top of FastAPI, Starlette, Pydantic, AIOKafka and AsyncAPI, FastKafka simplifies the process of writing producers and consumers for Kafka topics.\"\n  repo-branch: main\n  repo-url: \"https://github.com/airtai/fastkafka\"\n"
  },
  {
    "path": "nbs/sidebar.yml",
    "content": "website:\n  sidebar:\n    contents:\n      - index.ipynb\n      \n      - section: Guides\n        contents:\n        - section: Writing services\n          contents:\n          - guides/Guide_11_Consumes_Basics.ipynb\n          - guides/Guide_12_Batch_Consuming.ipynb\n          - guides/Guide_21_Produces_Basics.ipynb\n          - guides/Guide_22_Partition_Keys.ipynb\n          - guides/Guide_23_Batch_Producing.ipynb\n          - guides/Guide_05_Lifespan_Handler.ipynb\n          - guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.ipynb\n          - guides/Guide_24_Using_Multiple_Kafka_Clusters.ipynb\n        - section: Testing\n          contents:\n          - guides/Guide_33_Using_Tester_class_to_test_fastkafka.ipynb \n          - guides/Guide_31_Using_redpanda_to_test_fastkafka.ipynb\n        - section: Documentation generation\n          contents:\n          - guides/Guide_04_Github_Actions_Workflow.ipynb\n        - section: Deployment\n          contents:          \n          - guides/Guide_30_Using_docker_to_deploy_fastkafka.ipynb   \n          - guides/Guide_32_Using_fastapi_to_run_fastkafka_application.ipynb\n        - section: Benchmarking\n          contents:          \n          - guides/Guide_06_Benchmarking_FastKafka.ipynb\n"
  },
  {
    "path": "nbs/styles.css",
    "content": ".cell {\n  margin-bottom: 1rem;\n}\n\n.cell > .sourceCode {\n  margin-bottom: 0;\n}\n\n.cell-output > pre {\n  margin-bottom: 0;\n}\n\n.cell-output > pre, .cell-output > .sourceCode > pre, .cell-output-stdout > pre {\n  margin-left: 0.8rem;\n  margin-top: 0;\n  background: none;\n  border-left: 2px solid lightsalmon;\n  border-top-left-radius: 0;\n  border-top-right-radius: 0;\n}\n\n.cell-output > .sourceCode {\n  border: none;\n}\n\n.cell-output > .sourceCode {\n  background: none;\n  margin-top: 0;\n}\n\ndiv.description {\n  padding-left: 2px;\n  padding-top: 5px;\n  font-style: italic;\n  font-size: 135%;\n  opacity: 70%;\n}\n"
  },
  {
    "path": "run_jupyter.sh",
    "content": "#!/bin/bash\n\nsource set_variables.sh\n\ndocker-compose -p $DOCKER_COMPOSE_PROJECT -f docker/dev.yml up -d --no-recreate\n\nsleep 10\n\ndocker logs $USER-fastkafka-devel 2>&1 | grep token\n"
  },
  {
    "path": "set_variables.sh",
    "content": "#!/bin/bash\nif test -z \"$AIRT_PROJECT\"; then\n      echo 'AIRT_PROJECT variable not set, setting to current directory'\n      export AIRT_PROJECT=`pwd`\nfi\necho AIRT_PROJECT variable set to $AIRT_PROJECT\n\nexport UID=$(id -u)\nexport GID=$(id -g)\n\nexport DOCKER_COMPOSE_PROJECT=\"${USER}-fastkafka\"\necho DOCKER_COMPOSE_PROJECT variable set to $DOCKER_COMPOSE_PROJECT\nexport KAFKA_HOSTNAME=\"${DOCKER_COMPOSE_PROJECT}-kafka-1\"\necho KAFKA_HOSTNAME variable set to $KAFKA_HOSTNAME\nexport PRESERVE_ENVS=\"KAFKA_HOSTNAME,KAFKA_PORT\"\n"
  },
  {
    "path": "settings.ini",
    "content": "[DEFAULT]\n# All sections below are required unless otherwise specified.\n# See https://github.com/fastai/nbdev/blob/master/settings.ini for examples.\n\n### Python library ###\nrepo = fastkafka\nlib_name = %(repo)s\nversion = 0.9.0rc0\nmin_python = 3.8\nlicense = apache2\n\n\n### nbdev ###\ndoc_path = _docs\nlib_path = fastkafka\nnbs_path = nbs\nrecursive = True\ntst_flags = notest\nput_version_in_init = True\nblack_formatting = True\ndocs_versioning = patch\n\n### Docs ###\nbranch = main\ncustom_sidebar = True\ndoc_host = https://%(user)s.github.io\ndoc_baseurl = /%(repo)s\ngit_url = https://github.com/%(user)s/%(repo)s\ntitle = %(lib_name)s\n\n### PyPI ###\naudience = Developers\nauthor = airt\nauthor_email = info@airt.ai\ncopyright = 2022 onwards, %(author)s\ndescription = FastKafka is a powerful and easy-to-use Python library for building asynchronous web services that interact with Kafka topics. Built on top of FastAPI, Starlette, Pydantic, AIOKafka and AsyncAPI, FastKafka simplifies the process of writing producers and consumers for Kafka topics.\nkeywords = nbdev jupyter notebook python kafka\nlanguage = English\nstatus = 4\nuser = airtai\n\nconsole_scripts = fastkafka=fastkafka._cli:_app\n    run_fastkafka_server_process=fastkafka._server:_app\n"
  },
  {
    "path": "setup.py",
    "content": "from pkg_resources import parse_version\nfrom configparser import ConfigParser\nimport setuptools\nassert parse_version(setuptools.__version__)>=parse_version('36.2') # nosec\n\n# note: all settings are in settings.ini; edit there, not here\nconfig = ConfigParser(delimiters=['='])\nconfig.read('settings.ini')\ncfg = config['DEFAULT']\n\ncfg_keys = 'version description keywords author author_email'.split()\nexpected = cfg_keys + \"lib_name user branch license status min_python audience language\".split()\nfor o in expected: assert o in cfg, \"missing expected setting: {}\".format(o) # nosec\nsetup_cfg = {o:cfg[o] for o in cfg_keys}\n\nlicenses = {\n    'apache2': ('Apache Software License 2.0','OSI Approved :: Apache Software License'),\n    'mit': ('MIT License', 'OSI Approved :: MIT License'),\n    'gpl2': ('GNU General Public License v2', 'OSI Approved :: GNU General Public License v2 (GPLv2)'),\n    'gpl3': ('GNU General Public License v3', 'OSI Approved :: GNU General Public License v3 (GPLv3)'),\n    'bsd3': ('BSD License', 'OSI Approved :: BSD License'),\n}\nstatuses = [ '1 - Planning', '2 - Pre-Alpha', '3 - Alpha',\n    '4 - Beta', '5 - Production/Stable', '6 - Mature', '7 - Inactive' ]\npy_versions = '3.6 3.7 3.8 3.9 3.10 3.11'.split()\n\nrequirements = [\n    \"pydantic>=2.0\",\n    \"anyio>=3.0\",\n    \"aiokafka>=0.8.0\",\n    \"asyncer>=0.0.2\",\n    \"tqdm>=4.62\",\n    \"docstring-parser>=0.15\",\n    \"typer>=0.7.0\",\n    \"nest-asyncio>=1.5.6\",\n    \"psutil>=5.9.5;platform_system=='Windows'\",\n]\navro_requirements = [\n    \"fastavro>=1.7.3\"\n]\ntest_requirements = [\n    \"install-jdk==0.3.0\",\n    \"ipywidgets>=8.0,<=8.0.4\",\n    \"requests>=2.20\",\n]\ndocs_requirements = [\n    \"PyYAML>=5.3.1\",\n    \"aiohttp>=3.8.4\"\n]\n\nmin_python = cfg['min_python']\nlic = licenses.get(cfg['license'].lower(), (cfg['license'], None))\n\ndev_requirements = [\n    \"nbconvert>=7.2.9\",\n    \"nbformat>=5.7.3\",\n    
\"nbdev-mkdocs==0.6.0\",\n    \"mypy==1.3.0\",\n    \"pre-commit==3.3.1\",\n    \"nbqa==1.6.3\",\n    \"black==23.3.0\",\n    \"isort==5.12.0\",\n    \"bandit==1.7.5\",\n    \"semgrep==1.21.0\",\n    \"pytest==7.3.1\",\n    \"numpy>=1.21.0\",\n    \"pandas>=1.2.0\",\n    \"email-validator>=2.0.0\",\n    \"scikit-learn==1.2.1\",\n    \"ipython<8.13\",\n    \"fastapi>=0.100.0b2\",\n    \"uvicorn==0.22.0\",\n]\n\nproject_urls = {\n   'Bug Tracker': cfg['git_url'] + '/issues',\n   'CI': cfg['git_url'] + '/actions',\n   'Documentation': 'https://fastkafka.airt.ai/',\n#    'Source Code': cfg['git_url'],\n    'Tutorial': 'https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb'\n}\n\nsetuptools.setup(\n    name = cfg['lib_name'],\n    license = lic[0],\n    classifiers = [\n        'Development Status :: ' + statuses[int(cfg['status'])],\n        'Intended Audience :: ' + cfg['audience'].title(),\n        'Natural Language :: ' + cfg['language'].title(),\n    ] + ['Programming Language :: Python :: '+o for o in py_versions[py_versions.index(min_python):]] + (['License :: ' + lic[1] ] if lic[1] else []),\n    url = cfg['git_url'],\n    project_urls=project_urls,\n    packages = setuptools.find_packages(),\n    include_package_data = True,\n    install_requires = requirements,\n    extras_require={ 'dev': dev_requirements + avro_requirements + test_requirements + docs_requirements, \"avro\": avro_requirements, \"test\": test_requirements, \"docs\": docs_requirements },\n    dependency_links = cfg.get('dep_links','').split(),\n    python_requires  = '>=' + cfg['min_python'],\n    long_description = open('README.md', encoding=\"UTF-8\").read(),\n    long_description_content_type = 'text/markdown',\n    zip_safe = False,\n    entry_points = {\n        'console_scripts': cfg.get('console_scripts','').split(),\n        'nbdev': [f'{cfg.get(\"lib_path\")}={cfg.get(\"lib_path\")}._modidx:d']\n    },\n    **setup_cfg) # type: 
ignore\n"
  },
  {
    "path": "stop_jupyter.sh",
    "content": "#!/bin/bash\n\nsource set_variables.sh\n\ndocker-compose -p $DOCKER_COMPOSE_PROJECT -f docker/dev.yml down\n"
  }
]